| index (int64) | repo_id (string) | file_path (string) | content (string) |
|---|---|---|---|
0
|
java-sources/ai/h2o/h2o-ext-target-encoder/3.46.0.7/ai/h2o/targetencoding
|
java-sources/ai/h2o/h2o-ext-target-encoder/3.46.0.7/ai/h2o/targetencoding/strategy/AllCategoricalTEApplicationStrategy.java
|
package ai.h2o.targetencoding.strategy;
import water.fvec.Frame;
import java.util.Arrays;
public class AllCategoricalTEApplicationStrategy extends TEApplicationStrategy {
private Frame _frame;
private String[] _excludedColumnNames;
/**
* Constructor for the strategy that selects all categorical columns
* @param frame the frame selection is being done from
* @param excludedColumnNames the column names we want to exclude from the result
* (e.g. the response column for classification tasks, the fold column if it is categorical, etc.)
*/
public AllCategoricalTEApplicationStrategy(Frame frame, String[] excludedColumnNames) {
_frame = frame;
_excludedColumnNames = excludedColumnNames;
}
public String[] getColumnsToEncode() {
return Arrays.stream(_frame.names())
.filter(columnName ->
_frame.vec(columnName).isCategorical()
&& ! Arrays.asList(_excludedColumnNames).contains(columnName)
)
.toArray(String[]::new);
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-target-encoder/3.46.0.7/ai/h2o/targetencoding
|
java-sources/ai/h2o/h2o-ext-target-encoder/3.46.0.7/ai/h2o/targetencoding/strategy/TEApplicationStrategy.java
|
package ai.h2o.targetencoding.strategy;
import water.Iced;
/**
* Strategy that defines which columns of the frame should be encoded with TargetEncoder
*/
public abstract class TEApplicationStrategy extends Iced {
public abstract String[] getColumnsToEncode();
}
|
0
|
java-sources/ai/h2o/h2o-ext-target-encoder/3.46.0.7/ai/h2o/targetencoding
|
java-sources/ai/h2o/h2o-ext-target-encoder/3.46.0.7/ai/h2o/targetencoding/strategy/ThresholdTEApplicationStrategy.java
|
package ai.h2o.targetencoding.strategy;
import water.fvec.Frame;
import java.util.Arrays;
/**
* Strategy that selects only categorical columns with cardinality greater than or equal to the specified threshold
*/
public class ThresholdTEApplicationStrategy extends TEApplicationStrategy {
private Frame _frame;
private String[] _excludedColumnNames;
private long _threshold;
/**
* Constructor for the strategy that selects categorical columns based on a cardinality threshold
* @param frame the frame selection is being done from
* @param excludedColumnNames the column names we want to exclude from the result
* (e.g. the response column for classification tasks, the fold column if it is categorical, etc.)
* @param threshold categorical columns with cardinality greater than or equal to {@code threshold} will be selected
*/
public ThresholdTEApplicationStrategy(Frame frame, long threshold, String[] excludedColumnNames) {
_frame = frame;
_excludedColumnNames = excludedColumnNames;
_threshold = threshold;
}
public String[] getColumnsToEncode() {
return Arrays.stream(_frame.names())
.filter(columnName ->
_frame.vec(columnName).isCategorical()
&& ! Arrays.asList(_excludedColumnNames).contains(columnName)
&& _frame.vec(columnName).cardinality() >= _threshold
)
.toArray(String[]::new);
}
}
|
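// Illustrative usage sketch (not part of the packaged sources): selecting the columns to
// target-encode with the strategies above. The frame and the excluded column names
// ("survived", "fold") are assumptions made only for this example; AllCategoricalTEApplicationStrategy
// could be swapped in to pick every categorical column regardless of cardinality.
static String[] pickColumnsToEncode(Frame fr) {
    String[] excluded = {"survived", "fold"};  // e.g. response column and a categorical fold column
    TEApplicationStrategy strategy = new ThresholdTEApplicationStrategy(fr, 5, excluded);
    return strategy.getColumnsToEncode();      // categorical columns with cardinality >= 5, minus excluded
}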
0
|
java-sources/ai/h2o/h2o-ext-target-encoder/3.46.0.7/hex/api
|
java-sources/ai/h2o/h2o-ext-target-encoder/3.46.0.7/hex/api/targetencoding/TargetEncoderAlgoRegister.java
|
package hex.api.targetencoding;
import ai.h2o.targetencoding.TargetEncoder;
import ai.h2o.targetencoding.TargetEncoderModel;
import hex.ModelBuilder;
import water.api.AlgoAbstractRegister;
import water.api.RestApiContext;
import water.api.SchemaServer;
public class TargetEncoderAlgoRegister extends AlgoAbstractRegister {
@Override
public void registerEndPoints(RestApiContext context) {
ModelBuilder targetEncoderModelBuilder = new TargetEncoder(true);
registerModelBuilder(context, targetEncoderModelBuilder, SchemaServer.getStableVersion());
context.registerEndpoint("target_encoder_transform", "GET /3/TargetEncoderTransform", TargetEncoderHandler.class, "transform",
"Transform using give TargetEncoderModel");
}
@Override
public String getName() {
return TargetEncoderModel.ALGO_NAME;
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-target-encoder/3.46.0.7/hex/api
|
java-sources/ai/h2o/h2o-ext-target-encoder/3.46.0.7/hex/api/targetencoding/TargetEncoderHandler.java
|
package hex.api.targetencoding;
import ai.h2o.targetencoding.BlendingParams;
import ai.h2o.targetencoding.TargetEncoderModel;
import hex.schemas.TargetEncoderTransformParametersV3;
import water.Iced;
import water.Key;
import water.api.Handler;
import water.api.schemas3.KeyV3;
import water.fvec.Frame;
public class TargetEncoderHandler extends Handler {
@SuppressWarnings("unused")
public KeyV3.FrameKeyV3 transform(final int version, final TargetEncoderTransformParametersV3 parametersV3) {
final TargetEncoderTransformParameters parameters = new TargetEncoderTransformParameters();
parametersV3.fillImpl(parameters);
final TargetEncoderModel model = parameters._model.get();
final boolean asTraining = parameters._as_training;
final double noise = parameters._noise < -1 ? model._parms._noise : parameters._noise;
final BlendingParams blendingParams = parameters._blending
? new BlendingParams(
parameters._inflection_point < 0 ? model._parms._inflection_point : parameters._inflection_point,
parameters._smoothing < 0 ? model._parms._smoothing : parameters._smoothing
)
: null;
final Frame transformedFrame = model.transform(
parameters._frame.get(),
asTraining,
TargetEncoderModel.NO_FOLD,
blendingParams,
noise
);
return new KeyV3.FrameKeyV3(transformedFrame._key);
}
public static class TargetEncoderTransformParameters extends Iced<TargetEncoderTransformParameters> {
public Key<TargetEncoderModel> _model;
public Key<Frame> _frame;
public boolean _as_training;
public boolean _blending;
public double _inflection_point = -1;
public double _smoothing = -1;
public double _noise = -2; // use -2 for not-provided (-1 already means AUTO, and 0 means disabled).
}
}
|
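// Sketch of how the sentinel defaults above behave (derived from the handler code, shown only
// as an example): -1 for inflection_point/smoothing and -2 for noise mean "not provided", in
// which case transform() falls back to the values stored in the model's parameters.
static TargetEncoderTransformParameters exampleParams(Key<TargetEncoderModel> teModelKey, Key<Frame> frameKey) {
    TargetEncoderTransformParameters p = new TargetEncoderTransformParameters();
    p._model = teModelKey;  // key of a trained target-encoder model
    p._frame = frameKey;    // key of the frame to transform
    p._blending = true;     // inflection_point/smoothing stay at -1, so the model's values are used
    p._noise = 0;           // 0 disables noise; leaving -2 would defer to the model's noise setting
    return p;
}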
0
|
java-sources/ai/h2o/h2o-ext-target-encoder/3.46.0.7/hex
|
java-sources/ai/h2o/h2o-ext-target-encoder/3.46.0.7/hex/schemas/TargetEncoderModelOutputV3.java
|
package hex.schemas;
import ai.h2o.targetencoding.ColumnsMapping;
import ai.h2o.targetencoding.TargetEncoderModel;
import water.api.API;
import water.api.schemas3.ModelOutputSchemaV3;
import water.api.schemas3.SchemaV3;
public class TargetEncoderModelOutputV3 extends ModelOutputSchemaV3<TargetEncoderModel.TargetEncoderOutput, TargetEncoderModelOutputV3> {
public static class ColumnsMappingV3 extends SchemaV3<ColumnsMapping, ColumnsMappingV3> {
@API(help = "Input column(s) from the same encoding group.")
public String[] from;
@API(help = "Output column(s) generated by the application of target encoding to the `from` group.")
public String[] to;
}
@API(help = "Mapping between input column(s) and their corresponding target encoded output column(s). " +
"Please note that there can be multiple columns on the input/from side if columns grouping was used, " +
"and there can also be multiple columns on the output/to side if the target was multiclass.",
direction = API.Direction.OUTPUT)
public ColumnsMappingV3[] input_to_output_columns;
}
|
0
|
java-sources/ai/h2o/h2o-ext-target-encoder/3.46.0.7/hex
|
java-sources/ai/h2o/h2o-ext-target-encoder/3.46.0.7/hex/schemas/TargetEncoderModelV3.java
|
package hex.schemas;
import ai.h2o.targetencoding.TargetEncoderModel;
import water.api.schemas3.ModelSchemaV3;
public class TargetEncoderModelV3 extends ModelSchemaV3<TargetEncoderModel, TargetEncoderModelV3, TargetEncoderModel.TargetEncoderParameters, TargetEncoderV3.TargetEncoderParametersV3,
TargetEncoderModel.TargetEncoderOutput, TargetEncoderModelOutputV3> {
@Override
public TargetEncoderModelOutputV3 createOutputSchema() {
return new TargetEncoderModelOutputV3();
}
@Override
public TargetEncoderV3.TargetEncoderParametersV3 createParametersSchema() {
return new TargetEncoderV3.TargetEncoderParametersV3();
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-target-encoder/3.46.0.7/hex
|
java-sources/ai/h2o/h2o-ext-target-encoder/3.46.0.7/hex/schemas/TargetEncoderTransformParametersV3.java
|
package hex.schemas;
import ai.h2o.targetencoding.TargetEncoderModel;
import hex.api.targetencoding.TargetEncoderHandler.TargetEncoderTransformParameters;
import water.api.API;
import water.api.schemas3.KeyV3;
import water.api.schemas3.SchemaV3;
public class TargetEncoderTransformParametersV3 extends SchemaV3<TargetEncoderTransformParameters, TargetEncoderTransformParametersV3> {
@API(help = "Target Encoder model to use.")
public KeyV3.ModelKeyV3<TargetEncoderModel> model;
@API(help = "Frame to transform.")
public KeyV3.FrameKeyV3 frame;
@API(help = "Force encoding mode for training data: when using a leakage handling strategy different from None, " +
"training data should be transformed with this flag set to true (Defaults to false).")
public boolean as_training;
@API(help = "Enables or disables blending. Defaults to the value assigned at model creation.")
public boolean blending;
@API(help = "Inflection point. Defaults to the value assigned at model creation.")
public double inflection_point;
@API(help = "Smoothing. Defaults to the value assigned at model creation.")
public double smoothing;
@API(help = "Noise. Defaults to the value assigned at model creation.")
public double noise;
}
|
0
|
java-sources/ai/h2o/h2o-ext-target-encoder/3.46.0.7/hex
|
java-sources/ai/h2o/h2o-ext-target-encoder/3.46.0.7/hex/schemas/TargetEncoderV3.java
|
package hex.schemas;
import ai.h2o.targetencoding.TargetEncoderModel.DataLeakageHandlingStrategy;
import ai.h2o.targetencoding.TargetEncoder;
import ai.h2o.targetencoding.TargetEncoderModel;
import water.api.API;
import water.api.EnumValuesProvider;
import water.api.schemas3.ModelParametersSchemaV3;
import java.util.ArrayList;
import java.util.List;
public class TargetEncoderV3 extends ModelBuilderSchema<TargetEncoder, TargetEncoderV3, TargetEncoderV3.TargetEncoderParametersV3> {
public static class TargetEncoderParametersV3 extends ModelParametersSchemaV3<TargetEncoderModel.TargetEncoderParameters, TargetEncoderParametersV3> {
@API(help = "List of categorical columns or groups of categorical columns to encode. " +
"When groups of columns are specified, each group is encoded as a single column (interactions are created internally).",
level = API.Level.critical)
public String[][] columns_to_encode;
@API(help = "If true, the original non-encoded categorical features will remain in the result frame.",
level = API.Level.critical)
public boolean keep_original_categorical_columns;
@API(help = "If true, enables blending of posterior probabilities (computed for a given categorical value) " +
"with prior probabilities (computed on the entire set). " +
"This allows to mitigate the effect of categorical values with small cardinality. " +
"The blending effect can be tuned using the `inflection_point` and `smoothing` parameters.",
level = API.Level.secondary)
public boolean blending;
@API(help = "Inflection point of the sigmoid used to blend probabilities (see `blending` parameter). " +
"For a given categorical value, if it appears less that `inflection_point` in a data sample, " +
"then the influence of the posterior probability will be smaller than the prior.",
level = API.Level.secondary)
public double inflection_point;
@API(help = "Smoothing factor corresponds to the inverse of the slope at the inflection point " +
"on the sigmoid used to blend probabilities (see `blending` parameter). " +
"If smoothing tends towards 0, then the sigmoid used for blending turns into a Heaviside step function.",
level = API.Level.secondary)
public double smoothing;
@API(help = "Data leakage handling strategy used to generate the encoding. Supported options are:\n" +
"1) \"none\" (default) - no holdout, using the entire training frame.\n" +
"2) \"leave_one_out\" - current row's response value is subtracted from the per-level frequencies pre-calculated on the entire training frame.\n" +
"3) \"k_fold\" - encodings for a fold are generated based on out-of-fold data.\n",
valuesProvider = DataLeakageHandlingStrategyProvider.class,
level = API.Level.secondary)
public DataLeakageHandlingStrategy data_leakage_handling;
@API(help = "The amount of noise to add to the encoded column. " +
"Use 0 to disable noise, and -1 (=AUTO) to let the algorithm determine a reasonable amount of noise.",
direction = API.Direction.INPUT, gridable = true, level = API.Level.expert)
public double noise;
@API(help = "Seed used to generate the noise. By default, the seed is chosen randomly.",
direction = API.Direction.INPUT, level = API.Level.expert)
public long seed;
@Override
public String[] fields() {
final List<String> params = new ArrayList<>();
params.add("model_id");
params.add("training_frame");
params.add("fold_column");
params.add("response_column");
params.add("ignored_columns");
params.addAll(extractDeclaredApiParameters(getClass()));
return params.toArray(new String[0]);
}
}
public static final class DataLeakageHandlingStrategyProvider extends EnumValuesProvider<DataLeakageHandlingStrategy> {
public DataLeakageHandlingStrategyProvider() { super(DataLeakageHandlingStrategy.class); }
}
}
|
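// Sketch of the blending described in the `blending`/`inflection_point`/`smoothing` help above.
// The exact form used internally is an assumption; the snippet only illustrates the roles of the
// two parameters: for a categorical level seen n times, the encoding mixes the posterior (level
// mean of the target) with the prior (global mean) using a sigmoid weight.
static double blend(double posterior, double prior, long n, double inflectionPoint, double smoothing) {
    double lambda = 1.0 / (1.0 + Math.exp(-(n - inflectionPoint) / smoothing)); // small n -> lambda near 0, prior dominates
    return lambda * posterior + (1.0 - lambda) * prior; // smoothing -> 0 turns this into a step at n == inflectionPoint
}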
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/ai/h2o/xgboost4j
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/ai/h2o/xgboost4j/java/BoosterWrapper.java
|
package ai.h2o.xgboost4j.java;
import hex.tree.xgboost.util.BoosterHelper;
import java.util.Map;
/**
* Wrapper to expose package-private methods
*/
public class BoosterWrapper {
private final Booster booster;
public BoosterWrapper(
byte[] checkpointBoosterBytes,
Map<String, Object> params,
DMatrix train,
DMatrix valid
) throws XGBoostError {
if (checkpointBoosterBytes != null) {
booster = BoosterHelper.loadModel(checkpointBoosterBytes);
booster.setParams(params);
} else {
DMatrix[] cacheMats = valid == null ? new DMatrix[]{train} : new DMatrix[]{train, valid};
booster = Booster.newBooster(params, cacheMats);
booster.loadRabitCheckpoint();
}
booster.saveRabitCheckpoint();
}
public void update(DMatrix dtrain, int iter) throws XGBoostError {
booster.update(dtrain, iter);
}
public String evalSet(DMatrix dtrain, DMatrix dvalid, int iter) throws XGBoostError {
if (dvalid == null) {
return booster.evalSet(new DMatrix[]{dtrain}, new String[]{"train"}, iter);
} else {
return booster.evalSet(new DMatrix[]{dtrain, dvalid}, new String[]{"train", "valid"}, iter);
}
}
public void saveRabitCheckpoint() throws XGBoostError {
booster.saveRabitCheckpoint();
}
public byte[] toByteArray() throws XGBoostError {
return booster.toByteArray();
}
public void dispose() {
booster.dispose();
}
public Booster getBooster() {
return booster;
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/biz/k11i/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/biz/k11i/xgboost/tree/NodeHelper.java
|
package biz.k11i.xgboost.tree;
import biz.k11i.xgboost.util.ModelReader;
import java.io.IOException;
/**
* This class exposes some package-private APIs of RegTreeImpl.Node and provides additional helper methods.
* These methods can eventually be folded back into the original class.
*/
public class NodeHelper {
public static RegTreeNode read(ModelReader reader) throws IOException {
return new RegTreeImpl.Node(reader);
}
public static boolean isEqual(RegTreeNode left, RegTreeNode right) {
return left == right || ( // also covers null case
left.getParentIndex() == right.getParentIndex()
&& left.getLeftChildIndex() == right.getLeftChildIndex()
&& left.getRightChildIndex() == right.getRightChildIndex()
&& left.getSplitIndex() == right.getSplitIndex()
&& Float.compare(left.getLeafValue(), right.getLeafValue()) == 0
&& Float.compare(left.getSplitCondition(), right.getSplitCondition()) == 0
&& left.default_left() == right.default_left()
);
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/api
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/api/xgboost/RegisterRestApi.java
|
package hex.api.xgboost;
import hex.tree.xgboost.XGBoost;
import hex.tree.xgboost.XGBoostExtension;
import hex.tree.xgboost.remote.RemoteXGBoostHandler;
import water.ExtensionManager;
import water.api.AlgoAbstractRegister;
import water.api.RestApiContext;
import water.api.SchemaServer;
import java.util.Collections;
import java.util.List;
public class RegisterRestApi extends AlgoAbstractRegister {
@Override
public void registerEndPoints(RestApiContext context) {
XGBoostExtension ext = (XGBoostExtension) ExtensionManager.getInstance().getCoreExtension(XGBoostExtension.NAME);
if (ext != null) {
// We might not have the extension available if running from h2o-bindings
ext.logNativeLibInfo();
}
XGBoost xgBoostMB = new XGBoost(true);
// Register XGBoost model builder REST API
registerModelBuilder(context, xgBoostMB, SchemaServer.getStableVersion());
// Register Remote XGBoost execution
context.registerEndpoint(
"remote_xgb_init", "POST /3/XGBoostExecutor.init",
RemoteXGBoostHandler.class, "init",
"Remote XGBoost execution - init"
);
context.registerEndpoint(
"remote_xgb_setup", "POST /3/XGBoostExecutor.setup",
RemoteXGBoostHandler.class, "setup",
"Remote XGBoost execution - setup"
);
context.registerEndpoint(
"remote_xgb_update", "POST /3/XGBoostExecutor.update",
RemoteXGBoostHandler.class, "update",
"Remote XGBoost execution - update"
);
context.registerEndpoint(
"remote_xgb_metric", "POST /3/XGBoostExecutor.getEvalMetric",
RemoteXGBoostHandler.class, "getEvalMetric",
"Remote XGBoost execution - getEvalMetric"
);
context.registerEndpoint(
"remote_xgb_booster", "POST /3/XGBoostExecutor.getBooster",
RemoteXGBoostHandler.class, "getBooster",
"Remote XGBoost execution - get booster"
);
context.registerEndpoint(
"remote_xgb_cleanup", "POST /3/XGBoostExecutor.cleanup",
RemoteXGBoostHandler.class, "cleanup",
"Remote XGBoost execution - cleanup"
);
}
@Override
public String getName() {
return "XGBoost";
}
@Override
public List<String> getRequiredCoreExtensions() {
return Collections.singletonList(XGBoostExtension.NAME);
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/ensemble/XGBoostMetalearnerProvider.java
|
package hex.ensemble;
import hex.genmodel.utils.DistributionFamily;
import hex.schemas.XGBoostV3;
public class XGBoostMetalearnerProvider implements MetalearnerProvider<XGBoostMetalearnerProvider.XGBoostMetalearner> {
static class XGBoostMetalearner extends Metalearners.MetalearnerWithDistribution {
public XGBoostMetalearner() {
super(Algorithm.xgboost.name());
}
}
@Override
public String getName() {
return Metalearner.Algorithm.xgboost.name();
}
@Override
public XGBoostMetalearner newInstance() {
return new XGBoostMetalearner();
}
@Override
public XGBoostV3.XGBoostParametersV3 newParametersSchemaInstance() {
return new XGBoostV3.XGBoostParametersV3();
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/schemas/XGBoostExecReqV3.java
|
package hex.schemas;
import hex.tree.xgboost.exec.XGBoostExecReq;
import org.apache.commons.codec.binary.Base64;
import water.AutoBuffer;
import water.Iced;
import water.Key;
import water.api.API;
import water.api.Schema;
import water.api.schemas3.KeyV3;
public class XGBoostExecReqV3 extends Schema<Iced, XGBoostExecReqV3> {
public XGBoostExecReqV3(Key key, XGBoostExecReq req) {
this.key = KeyV3.make(key);
this.data = Base64.encodeBase64String(AutoBuffer.serializeBootstrapFreezable(req));
}
public XGBoostExecReqV3() {
}
@API(help="Identifier")
public KeyV3 key;
@API(help="Arbitrary request data stored as Base64 encoded binary")
public String data;
@SuppressWarnings("unchecked")
public <T> T readData() {
return (T) AutoBuffer.deserializeBootstrapFreezable(Base64.decodeBase64(data));
}
}
|
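// Illustrative round-trip sketch (assumptions: `key` is an existing Key and `req` some concrete
// XGBoostExecReq): the request is serialized into the Base64 `data` field when the schema is
// constructed and recovered on the receiving side via readData().
static XGBoostExecReq roundTrip(Key key, XGBoostExecReq req) {
    XGBoostExecReqV3 wire = new XGBoostExecReqV3(key, req); // AutoBuffer bytes -> Base64 string
    return wire.readData();                                 // Base64 string -> same object graph
}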
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/schemas/XGBoostExecRespV3.java
|
package hex.schemas;
import water.BootstrapFreezable;
import org.apache.commons.codec.binary.Base64;
import water.AutoBuffer;
import water.Iced;
import water.Key;
import water.api.API;
import water.api.Schema;
import water.api.schemas3.KeyV3;
public class XGBoostExecRespV3 extends Schema<Iced, XGBoostExecRespV3> {
@API(help="Identifier")
public KeyV3 key;
@API(help="Arbitrary response data stored as Base64 encoded binary")
public String data;
public XGBoostExecRespV3() {}
public XGBoostExecRespV3(Key key) {
this.key = KeyV3.make(key);
this.data = "";
}
public XGBoostExecRespV3(Key key, BootstrapFreezable<?> data) {
this.key = KeyV3.make(key);
this.data = Base64.encodeBase64String(AutoBuffer.serializeBootstrapFreezable(data));
}
@Override
public String toString() {
return "XGBoostExecRespV3{" +
"key=" + key +
'}';
}
public <T> T readData() {
if (data.length() > 0) {
return (T) AutoBuffer.deserializeBootstrapFreezable(Base64.decodeBase64(data));
} else {
return null;
}
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/schemas/XGBoostModelV3.java
|
package hex.schemas;
import hex.tree.xgboost.XGBoostModel;
import hex.tree.xgboost.XGBoostOutput;
import water.api.API;
import water.api.schemas3.ModelOutputSchemaV3;
import water.api.schemas3.ModelSchemaV3;
import water.api.schemas3.TwoDimTableV3;
public class XGBoostModelV3 extends ModelSchemaV3<
XGBoostModel,
XGBoostModelV3,
XGBoostModel.XGBoostParameters,
XGBoostV3.XGBoostParametersV3,
XGBoostOutput,
XGBoostModelV3.XGBoostModelOutputV3> {
public static final class XGBoostModelOutputV3 extends ModelOutputSchemaV3<XGBoostOutput, XGBoostModelOutputV3> {
@API(help="Variable Importances", direction=API.Direction.OUTPUT, level = API.Level.secondary)
TwoDimTableV3 variable_importances;
@API(help="Variable Importances - Cover", direction=API.Direction.OUTPUT, level = API.Level.secondary)
TwoDimTableV3 variable_importances_cover;
@API(help="Variable Importances - Frequency", direction=API.Direction.OUTPUT, level = API.Level.secondary)
TwoDimTableV3 variable_importances_frequency;
@API(help="XGBoost Native Parameters", direction=API.Direction.OUTPUT, level = API.Level.secondary)
TwoDimTableV3 native_parameters;
@API(help="Sparse", direction=API.Direction.OUTPUT, level = API.Level.secondary)
boolean sparse;
}
public XGBoostV3.XGBoostParametersV3 createParametersSchema() { return new XGBoostV3.XGBoostParametersV3(); }
public XGBoostModelOutputV3 createOutputSchema() { return new XGBoostModelOutputV3(); }
//==========================
// Custom adapters go here
// Version&Schema-specific filling into the impl
@Override public XGBoostModel createImpl() {
XGBoostV3.XGBoostParametersV3 p = this.parameters;
XGBoostModel.XGBoostParameters parms = p.createImpl();
return new XGBoostModel(model_id.key(), parms, new XGBoostOutput(null), null, null);
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/schemas/XGBoostV3.java
|
package hex.schemas;
import hex.tree.CalibrationHelper;
import hex.tree.xgboost.XGBoost;
import hex.tree.xgboost.XGBoostModel.XGBoostParameters;
import water.api.API;
import water.api.schemas3.*;
public class XGBoostV3 extends ModelBuilderSchema<XGBoost,XGBoostV3,XGBoostV3.XGBoostParametersV3> {
public static final class XGBoostParametersV3 extends ModelParametersSchemaV3<XGBoostParameters, XGBoostParametersV3> {
static public String[] fields = new String[] {
"model_id",
"training_frame",
"validation_frame",
"nfolds",
"keep_cross_validation_models",
"keep_cross_validation_predictions",
"keep_cross_validation_fold_assignment",
"score_each_iteration",
"fold_assignment",
"fold_column",
"response_column",
"ignored_columns",
"ignore_const_cols",
"offset_column",
"weights_column",
"stopping_rounds",
"stopping_metric",
"stopping_tolerance",
"max_runtime_secs",
"seed",
"distribution",
"tweedie_power",
"categorical_encoding",
"quiet_mode",
"checkpoint",
"export_checkpoints_dir",
"custom_metric_func",
// model specific
"ntrees",
"max_depth",
"min_rows", "min_child_weight",
"learn_rate", "eta",
"sample_rate", "subsample",
"col_sample_rate", "colsample_bylevel",
"col_sample_rate_per_tree", "colsample_bytree",
"colsample_bynode",
"max_abs_leafnode_pred", "max_delta_step",
"monotone_constraints",
"interaction_constraints",
"score_tree_interval",
"min_split_improvement", "gamma",
//runtime
"nthread",
"save_matrix_directory",
"build_tree_one_node",
"parallelize_cross_validation",
//model calibration
"calibrate_model",
"calibration_frame",
"calibration_method",
//lightgbm only
"max_bins",
"max_leaves",
//dart
"sample_type",
"normalize_type",
"rate_drop",
"one_drop",
"skip_drop",
//xgboost only
"tree_method",
"grow_policy",
"booster",
"reg_lambda",
"reg_alpha",
"dmatrix_type",
"backend",
"gpu_id",
"gainslift_bins",
"auc_type",
"scale_pos_weight",
"eval_metric",
"score_eval_metric_only"
};
@API(help="(same as n_estimators) Number of trees.", gridable = true)
public int ntrees;
@API(help="Maximum tree depth (0 for unlimited).", gridable = true)
public int max_depth;
@API(help="(same as min_child_weight) Fewest allowed (weighted) observations in a leaf.", gridable = true)
public double min_rows;
@API(help="(same as min_rows) Fewest allowed (weighted) observations in a leaf.", gridable = true, level = API.Level.expert)
public double min_child_weight;
@API(help="(same as eta) Learning rate (from 0.0 to 1.0)", gridable = true, level = API.Level.expert)
public double learn_rate;
@API(help="(same as learn_rate) Learning rate (from 0.0 to 1.0)", gridable = true)
public double eta;
@API(help = "(same as subsample) Row sample rate per tree (from 0.0 to 1.0)", gridable = true)
public double sample_rate;
@API(help = "(same as sample_rate) Row sample rate per tree (from 0.0 to 1.0)", gridable = true, level = API.Level.expert)
public double subsample;
@API(help="(same as colsample_bylevel) Column sample rate (from 0.0 to 1.0)", gridable = true)
public double col_sample_rate;
@API(help="(same as col_sample_rate) Column sample rate (from 0.0 to 1.0)", gridable = true, level = API.Level.expert)
public double colsample_bylevel;
@API(help = "(same as colsample_bytree) Column sample rate per tree (from 0.0 to 1.0)", level = API.Level.secondary, gridable = true)
public double col_sample_rate_per_tree;
@API(help = "(same as col_sample_rate_per_tree) Column sample rate per tree (from 0.0 to 1.0)", level = API.Level.expert, gridable = true)
public double colsample_bytree;
@API(help = "Column sample rate per tree node (from 0.0 to 1.0)", level = API.Level.secondary, gridable = true)
public double colsample_bynode;
@API(help = "A mapping representing monotonic constraints. Use +1 to enforce an increasing constraint and -1 to specify a decreasing constraint.", level = API.Level.secondary)
public KeyValueV3[] monotone_constraints;
@API(help="(same as max_delta_step) Maximum absolute value of a leaf node prediction", level = API.Level.expert, gridable = true)
public float max_abs_leafnode_pred;
@API(help="(same as max_abs_leafnode_pred) Maximum absolute value of a leaf node prediction", level = API.Level.expert, gridable = true)
public float max_delta_step;
@API(help="Score the model after every so many trees. Disabled if set to 0.", level = API.Level.secondary, gridable = false)
public int score_tree_interval;
@API(help = "Seed for pseudo random number generator (if applicable)", gridable = true)
public long seed;
@API(help="(same as gamma) Minimum relative improvement in squared error reduction for a split to happen", level = API.Level.secondary, gridable = true)
public float min_split_improvement;
@API(help="(same as min_split_improvement) Minimum relative improvement in squared error reduction for a split to happen", level = API.Level.expert, gridable = true)
public float gamma;
@API(help = "Number of parallel threads that can be used to run XGBoost. Cannot exceed H2O cluster limits (-nthreads parameter). Defaults to maximum available", level = API.Level.expert)
public int nthread;
@API(help="Run on one node only; no network overhead but fewer cpus used. Suitable for small datasets.", level = API.Level.expert, gridable = false)
public boolean build_tree_one_node;
@API(help = "Directory where to save matrices passed to XGBoost library. Useful for debugging.", level = API.Level.expert)
public String save_matrix_directory;
@API(help="Use Platt Scaling (default) or Isotonic Regression to calculate calibrated class probabilities. Calibration can provide more accurate estimates of class probabilities.", level = API.Level.expert)
public boolean calibrate_model;
@API(help="Data for model calibration", level = API.Level.expert, direction = API.Direction.INOUT)
public KeyV3.FrameKeyV3 calibration_frame;
@API(help="Calibration method to use", values = {"AUTO", "PlattScaling", "IsotonicRegression"}, level = API.Level.expert, direction = API.Direction.INOUT)
public CalibrationHelper.CalibrationMethod calibration_method;
@API(help = "For tree_method=hist only: maximum number of bins", level = API.Level.expert, gridable = true)
public int max_bins;
@API(help = "For tree_method=hist only: maximum number of leaves", level = API.Level.secondary, gridable = true)
public int max_leaves;
@API(help="Tree method", values = { "auto", "exact", "approx", "hist"}, level = API.Level.secondary, gridable = true)
public XGBoostParameters.TreeMethod tree_method;
@API(help="Grow policy - depthwise is standard GBM, lossguide is LightGBM", values = { "depthwise", "lossguide"}, level = API.Level.secondary, gridable = true)
public XGBoostParameters.GrowPolicy grow_policy;
@API(help="Booster type", values = { "gbtree", "gblinear", "dart"}, level = API.Level.expert, gridable = true)
public XGBoostParameters.Booster booster;
@API(help = "L2 regularization", level = API.Level.expert, gridable = true)
public float reg_lambda;
@API(help = "L1 regularization", level = API.Level.expert, gridable = true)
public float reg_alpha;
@API(help="Enable quiet mode", level = API.Level.expert, gridable = false)
public boolean quiet_mode;
@API(help="For booster=dart only: sample_type", values = { "uniform", "weighted"}, level = API.Level.expert, gridable = true)
public XGBoostParameters.DartSampleType sample_type;
@API(help="For booster=dart only: normalize_type", values = { "tree", "forest"}, level = API.Level.expert, gridable = true)
public XGBoostParameters.DartNormalizeType normalize_type;
@API(help="For booster=dart only: rate_drop (0..1)", level = API.Level.expert, gridable = true)
public float rate_drop;
@API(help="For booster=dart only: one_drop", level = API.Level.expert, gridable = true)
public boolean one_drop;
@API(help="For booster=dart only: skip_drop (0..1)", level = API.Level.expert, gridable = true)
public float skip_drop;
@API(help="Type of DMatrix. For sparse, NAs and 0 are treated equally.", values = { "auto", "dense", "sparse" }, level = API.Level.secondary, gridable = true)
public XGBoostParameters.DMatrixType dmatrix_type;
@API(help="Backend. By default (auto), a GPU is used if available.", values = { "auto", "gpu", "cpu" }, level = API.Level.expert, gridable = true)
public XGBoostParameters.Backend backend;
@API(help="Which GPU(s) to use. ", level = API.Level.expert, gridable = false)
public int[] gpu_id;
@API(help="A set of allowed column interactions.", level= API.Level.expert)
public String[][] interaction_constraints;
@API(help="Controls the effect of observations with positive labels in relation to the observations with negative labels on gradient calculation. Useful for imbalanced problems.", level= API.Level.expert, gridable = true)
public float scale_pos_weight;
@API(help="Specification of evaluation metric that will be passed to the native XGBoost backend.", level= API.Level.expert, gridable = true)
public String eval_metric;
@API(help="If enabled, score only the evaluation metric. This can make model training faster if scoring is frequent (eg. each iteration).", level= API.Level.expert, gridable = true)
public boolean score_eval_metric_only;
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/BoosterParms.java
|
package hex.tree.xgboost;
import water.H2O;
import water.Iced;
import water.util.IcedHashMapGeneric;
import water.util.TwoDimTable;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
/**
* Iced Wrapper around Booster parameter map. The main purpose is to avoid mistakes when using the parameter
* object directly: this class ensures that returned parameters will be localized.
*/
public class BoosterParms extends Iced<BoosterParms> {
private IcedHashMapGeneric.IcedHashMapStringObject _parms;
public static BoosterParms fromMap(Map<String, Object> map) {
BoosterParms bp = new BoosterParms();
bp._parms = new IcedHashMapGeneric.IcedHashMapStringObject();
bp._parms.putAll(map);
return bp;
}
/**
* @return localized Booster parameters
*/
public Map<String, Object> get() {
Map<String, Object> params = localizeDecimalParams(_parms);
addNodeSpecificParams(params);
return Collections.unmodifiableMap(params);
}
private static void addNodeSpecificParams(final Map<String, Object> params) {
final String sysGpuId = H2O.getSysProperty("xgboost.gpu.id", null);
if (sysGpuId != null) {
params.put("gpu_id", sysGpuId);
}
}
/**
* Iterates over a set of parameters and applies locale-specific formatting
* to decimal ones (Floats and Doubles).
*
* @param params Parameters to localize
* @return Map with localized parameter values
*/
private static Map<String, Object> localizeDecimalParams(final Map<String, Object> params) {
Map<String, Object> localized = new HashMap<>(params.size());
for (String key : params.keySet()) {
final Object value = params.get(key);
final Object newValue;
if (value instanceof Float || value instanceof Double) {
newValue = value.toString();
} else
newValue = value;
localized.put(key, newValue);
}
return localized;
}
public TwoDimTable toTwoDimTable() {
final int rows = _parms.size();
TwoDimTable table = new TwoDimTable(
"Native XGBoost Parameters", null,
new String[rows],
new String[]{"Name", "Value"},
new String[]{"String", "String"},
new String[]{"%s", "%s"},
"");
int row = 0;
for (Map.Entry<String, Object> p : _parms.entrySet()) {
table.set(row, 0, p.getKey());
table.set(row, 1, p.getValue());
row++;
}
assert row == rows;
return table;
}
}
|
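// Minimal usage sketch (values are assumptions): decimal parameters come back from get() as
// strings, so the native XGBoost parser always sees "0.1" regardless of the JVM default locale,
// while non-decimal values pass through unchanged.
static Map<String, Object> exampleBoosterParams() {
    Map<String, Object> raw = new HashMap<>();
    raw.put("eta", 0.1);      // Double -> returned as the String "0.1"
    raw.put("max_depth", 6);  // Integer -> returned unchanged
    return BoosterParms.fromMap(raw).get(); // unmodifiable map with stringified decimals
}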
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/EvalMetric.java
|
package hex.tree.xgboost;
import water.BootstrapFreezable;
import water.Iced;
import java.util.Objects;
public final class EvalMetric extends Iced<EvalMetric> implements BootstrapFreezable<EvalMetric> {
public final String _name;
public final double _trainValue;
public final double _validValue;
public EvalMetric(String name, double trainValue, double validValue) {
_name = name;
_trainValue = trainValue;
_validValue = validValue;
}
private EvalMetric(String name) {
this(name, Double.NaN, Double.NaN);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
EvalMetric that = (EvalMetric) o;
return Double.compare(that._trainValue, _trainValue) == 0 && Double.compare(that._validValue, _validValue) == 0 && Objects.equals(_name, that._name);
}
@Override
public int hashCode() {
return Objects.hash(_name, _trainValue, _validValue);
}
public static EvalMetric empty(String name) {
return new EvalMetric(name);
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/MemoryCheck.java
|
package hex.tree.xgboost;
import oshi.SystemInfo;
import oshi.hardware.GlobalMemory;
import oshi.hardware.HardwareAbstractionLayer;
import water.util.PrettyPrint;
public class MemoryCheck {
public static Report runCheck(double offHeapRatio) {
SystemInfo systemInfo = new SystemInfo();
HardwareAbstractionLayer hardware = systemInfo.getHardware();
GlobalMemory globalMemory = hardware.getMemory();
Runtime runtime = Runtime.getRuntime();
long available = globalMemory.getAvailable();
long availableOffHeap = Math.max(available - (runtime.maxMemory() - runtime.totalMemory()), 0);
long desiredOffHeap = (long) (runtime.maxMemory() * offHeapRatio);
return new Report(availableOffHeap, desiredOffHeap);
}
public static class Report {
public final long _available_off_heap;
public final long _desired_off_heap;
public Report(long available_off_heap, long desired_off_heap) {
_available_off_heap = available_off_heap;
_desired_off_heap = desired_off_heap;
}
public boolean isOffHeapRequirementMet() {
return _available_off_heap >= _desired_off_heap;
}
@Override
public String toString() {
return "Estimated Available Off-Heap (assuming JVM heap reaches maximum size): " + PrettyPrint.bytes(_available_off_heap) +
", Desired Off-Heap: " + PrettyPrint.bytes(_desired_off_heap);
}
}
}
|
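// Hypothetical usage sketch: verify that enough system memory is free outside the JVM heap before
// handing data to native XGBoost; the 0.25 ratio (desired off-heap = 25% of the maximum heap) is
// chosen arbitrarily for this example.
MemoryCheck.Report report = MemoryCheck.runCheck(0.25);
if (!report.isOffHeapRequirementMet()) {
    System.out.println("Not enough off-heap memory: " + report);
}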
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/XGBoost.java
|
package hex.tree.xgboost;
import biz.k11i.xgboost.gbm.GBTree;
import biz.k11i.xgboost.gbm.GradBooster;
import biz.k11i.xgboost.tree.RegTree;
import biz.k11i.xgboost.tree.RegTreeNode;
import hex.*;
import hex.genmodel.algos.xgboost.XGBoostJavaMojoModel;
import hex.genmodel.utils.DistributionFamily;
import hex.tree.CalibrationHelper;
import hex.tree.TreeUtils;
import hex.tree.xgboost.exec.LocalXGBoostExecutor;
import hex.tree.xgboost.exec.RemoteXGBoostExecutor;
import hex.tree.xgboost.exec.XGBoostExecutor;
import hex.tree.xgboost.predict.XGBoostVariableImportance;
import hex.tree.xgboost.remote.SteamExecutorStarter;
import hex.tree.xgboost.util.FeatureScore;
import hex.util.CheckpointUtils;
import org.apache.log4j.Logger;
import water.*;
import water.exceptions.H2OIllegalArgumentException;
import water.exceptions.H2OModelBuilderIllegalArgumentException;
import water.fvec.Frame;
import water.fvec.RebalanceDataSet;
import water.fvec.Vec;
import water.util.ArrayUtils;
import water.util.Log;
import water.util.Timer;
import water.util.TwoDimTable;
import java.io.IOException;
import java.util.*;
import static hex.tree.SharedTree.createModelSummaryTable;
import static hex.tree.SharedTree.createScoringHistoryTable;
import static hex.tree.xgboost.util.GpuUtils.*;
import static water.H2O.technote;
/**
* H2O XGBoost
*/
public class XGBoost extends ModelBuilder<XGBoostModel,XGBoostModel.XGBoostParameters,XGBoostOutput>
implements CalibrationHelper.ModelBuilderWithCalibration<XGBoostModel, XGBoostModel.XGBoostParameters, XGBoostOutput> {
private static final Logger LOG = Logger.getLogger(XGBoost.class);
private static final double FILL_RATIO_THRESHOLD = 0.25D;
@Override public boolean haveMojo() { return true; }
@Override public boolean havePojo() { return true; }
@Override public BuilderVisibility builderVisibility() {
if(ExtensionManager.getInstance().isCoreExtensionsEnabled(XGBoostExtension.NAME)){
return BuilderVisibility.Stable;
} else {
return BuilderVisibility.Experimental;
}
}
@Override public ModelCategory[] can_build() {
return new ModelCategory[]{
ModelCategory.Regression,
ModelCategory.Binomial,
ModelCategory.Multinomial,
};
}
// Called from an http request
public XGBoost(XGBoostModel.XGBoostParameters parms ) { super(parms ); init(false); }
public XGBoost(XGBoostModel.XGBoostParameters parms, Key<XGBoostModel> key) { super(parms, key); init(false); }
public XGBoost(boolean startup_once) { super(new XGBoostModel.XGBoostParameters(),startup_once); }
public boolean isSupervised(){return true;}
// Number of trees requested, including prior trees from a checkpoint
private int _ntrees;
// Calibration frame for Platt scaling
private transient Frame _calib;
@Override protected int nModelsInParallel(int folds) {
/*
Concept of XGBoost CV parallelization:
- for CPU backend use regular strategy with defaultParallelization = 2
- for GPU backend:
- running on GPU in parallel might not be faster in all cases, but H2O currently has overhead in scoring,
and scoring is always done on the CPU - we want to keep the GPU busy for the whole training; the idea is that while one model
is being scored (on the CPU) the other one is training on the GPU, so the GPU is never idle
- data up to a certain size limit can run 2 models in parallel per GPU
- bigger data takes the whole GPU
*/
if (_parms._parallelize_cross_validation &&
XGBoostModel.getActualBackend(_parms, false) == XGBoostModel.XGBoostParameters.Backend.gpu) {
int numGPUs = _parms._gpu_id != null && _parms._gpu_id.length > 0 ? _parms._gpu_id.length : numGPUs(H2O.CLOUD.members()[0]);
int parallelizationPerGPU = _train.byteSize() < parallelTrainingSizeLimit() ? 2 : 1;
return numGPUs * parallelizationPerGPU;
} else {
return nModelsInParallel(folds, 2);
}
}
/** Start the XGBoost training Job on an F/J thread. */
@Override protected XGBoostDriver trainModelImpl() {
return new XGBoostDriver();
}
/** Initialize the ModelBuilder, validating all arguments and preparing the
* training frame. This call is expected to be overridden in the subclasses
* and each subclass will start with "super.init();". This call is made
* by the front-end whenever the GUI is clicked, and needs to be fast;
* heavy-weight prep needs to wait for the trainModel() call.
* Validate the learning rate and distribution family. */
@Override public void init(boolean expensive) {
super.init(expensive);
if (H2O.CLOUD.size() > 1 && H2O.SELF.getSecurityManager().securityEnabled) {
if (H2O.ARGS.allow_insecure_xgboost) {
LOG.info("Executing XGBoost on an secured cluster might compromise security.");
} else {
throw new H2OIllegalArgumentException("Cannot run XGBoost on an SSL enabled cluster larger than 1 node. XGBoost does not support SSL encryption.");
}
}
if (H2O.ARGS.client && _parms._build_tree_one_node)
error("_build_tree_one_node", "Cannot run on a single node in client mode.");
if (expensive) {
if (_response != null && _response.naCnt() > 0) {
error("_response_column", "Response contains missing values (NAs) - not supported by XGBoost.");
}
if(!new XGBoostExtensionCheck().doAllNodes().enabled) {
error("XGBoost", "XGBoost is not available on all nodes!");
}
}
if (_parms.hasCheckpoint()) { // Asking to continue from checkpoint?
Value cv = DKV.get(_parms._checkpoint);
if (cv != null) { // Look for prior model
XGBoostModel checkpointModel = CheckpointUtils.getAndValidateCheckpointModel(this, XGBoostModel.XGBoostParameters.CHECKPOINT_NON_MODIFIABLE_FIELDS, cv);
// Compute number of trees to build for this checkpoint
_ntrees = _parms._ntrees - checkpointModel._output._ntrees; // Needed trees
}
} else {
_ntrees = _parms._ntrees;
}
if (_parms._max_depth < 0) error("_max_depth", "_max_depth must be >= 0.");
if (_parms._max_depth == 0) _parms._max_depth = Integer.MAX_VALUE;
if (expensive) {
if (error_count() > 0)
throw H2OModelBuilderIllegalArgumentException.makeFromBuilder(XGBoost.this);
}
if ( _parms._backend == XGBoostModel.XGBoostParameters.Backend.gpu) {
if (! hasGPU(_parms._gpu_id))
error("_backend", "GPU backend (gpu_id: " + Arrays.toString(_parms._gpu_id) + ") is not functional. Check CUDA_PATH and/or GPU installation.");
if (H2O.getCloudSize() > 1 && !_parms._build_tree_one_node && !allowMultiGPU())
error("_backend", "GPU backend is not supported in distributed mode.");
Map<String, Object> incompats = _parms.gpuIncompatibleParams();
if (! incompats.isEmpty())
for (Map.Entry<String, Object> incompat : incompats.entrySet())
error("_backend", "GPU backend is not available for parameter setting '" + incompat.getKey() + " = " + incompat.getValue() + "'. Use CPU backend instead.");
}
DistributionFamily[] allowed_distributions = new DistributionFamily[] {
DistributionFamily.AUTO,
DistributionFamily.bernoulli,
DistributionFamily.multinomial,
DistributionFamily.gaussian,
DistributionFamily.poisson,
DistributionFamily.gamma,
DistributionFamily.tweedie,
};
if (!ArrayUtils.contains(allowed_distributions, _parms._distribution))
error("_distribution", _parms._distribution.name() + " is not supported for XGBoost in current H2O.");
if (unsupportedCategoricalEncoding()) {
error("_categorical_encoding", _parms._categorical_encoding + " encoding is not supported for XGBoost in current H2O.");
}
switch( _parms._distribution) {
case bernoulli:
if( _nclass != 2 /*&& !couldBeBool(_response)*/)
error("_distribution", technote(2, "Binomial requires the response to be a 2-class categorical"));
break;
case modified_huber:
if( _nclass != 2 /*&& !couldBeBool(_response)*/)
error("_distribution", technote(2, "Modified Huber requires the response to be a 2-class categorical."));
break;
case multinomial:
if (!isClassifier()) error("_distribution", technote(2, "Multinomial requires an categorical response."));
break;
case huber:
if (isClassifier()) error("_distribution", technote(2, "Huber requires the response to be numeric."));
break;
case poisson:
if (isClassifier()) error("_distribution", technote(2, "Poisson requires the response to be numeric."));
break;
case gamma:
if (isClassifier()) error("_distribution", technote(2, "Gamma requires the response to be numeric."));
break;
case tweedie:
if (isClassifier()) error("_distribution", technote(2, "Tweedie requires the response to be numeric."));
break;
case gaussian:
if (isClassifier()) error("_distribution", technote(2, "Gaussian requires the response to be numeric."));
break;
case laplace:
if (isClassifier()) error("_distribution", technote(2, "Laplace requires the response to be numeric."));
break;
case quantile:
if (isClassifier()) error("_distribution", technote(2, "Quantile requires the response to be numeric."));
break;
case AUTO:
break;
default:
error("_distribution","Invalid distribution: " + _parms._distribution);
}
checkPositiveRate("learn_rate", _parms._learn_rate);
checkPositiveRate("sample_rate", _parms._sample_rate);
checkPositiveRate("subsample", _parms._subsample);
checkPositiveRate("col_sample_rate", _parms._col_sample_rate);
checkPositiveRate("col_sample_rate_per_tree", _parms._col_sample_rate_per_tree);
checkPositiveRate("colsample_bylevel", _parms._colsample_bylevel);
checkPositiveRate("colsample_bynode", _parms._colsample_bynode);
checkPositiveRate("colsample_bytree", _parms._colsample_bytree);
checkColumnAlias("col_sample_rate", _parms._col_sample_rate, "colsample_bylevel", _parms._colsample_bylevel, 1);
checkColumnAlias("col_sample_rate_per_tree", _parms._col_sample_rate_per_tree, "colsample_bytree", _parms._colsample_bytree, 1);
checkColumnAlias("sample_rate", _parms._sample_rate, "subsample", _parms._subsample, 1);
checkColumnAlias("learn_rate", _parms._learn_rate, "eta", _parms._eta, 0.3);
checkColumnAlias("max_abs_leafnode_pred", _parms._max_abs_leafnode_pred, "max_delta_step", _parms._max_delta_step,0);
checkColumnAlias("ntrees", _parms._ntrees, "n_estimators", _parms._n_estimators, 0);
if(_parms._tree_method.equals(XGBoostModel.XGBoostParameters.TreeMethod.approx) && (_parms._col_sample_rate < 1 || _parms._colsample_bylevel < 1)){
error("_tree_method", "approx is not supported with _col_sample_rate or _colsample_bylevel, use exact/hist instead or disable column sampling.");
}
if (_parms._scale_pos_weight != 1) {
if (_nclass != 2)
error("_scale_pos_weight", "scale_pos_weight can only be used for binary classification");
if (_parms._scale_pos_weight <= 0)
error("_scale_pos_weight", "scale_pos_weight must be a positive number");
}
if (_parms._grow_policy== XGBoostModel.XGBoostParameters.GrowPolicy.lossguide &&
_parms._tree_method!= XGBoostModel.XGBoostParameters.TreeMethod.hist)
error("_grow_policy", "must use tree_method=hist for grow_policy=lossguide");
if ((_train != null) && !_parms.monotoneConstraints().isEmpty()) {
if (_parms._tree_method == XGBoostModel.XGBoostParameters.TreeMethod.approx) {
error("_tree_method", "approx is not supported with _monotone_constraints, use auto/exact/hist instead");
} else {
assert _parms._tree_method == XGBoostModel.XGBoostParameters.TreeMethod.auto ||
_parms._tree_method == XGBoostModel.XGBoostParameters.TreeMethod.exact ||
_parms._tree_method == XGBoostModel.XGBoostParameters.TreeMethod.hist :
"Unexpected tree method used " + _parms._tree_method;
}
TreeUtils.checkMonotoneConstraints(this, _train, _parms._monotone_constraints);
}
if ((_train != null) && (H2O.CLOUD.size() > 1) &&
(_parms._tree_method == XGBoostModel.XGBoostParameters.TreeMethod.exact) &&
!_parms._build_tree_one_node)
error("_tree_method", "exact is not supported in distributed environment, set build_tree_one_node to true to use exact");
CalibrationHelper.initCalibration(this, _parms, expensive);
if (_parms.hasCustomMetricFunc() && _parms._eval_metric != null) {
error("custom_metric_func", "Custom metric is not supported together with eval_metric parameter. Please use only one of them.");
}
if (_parms._score_eval_metric_only && _parms._eval_metric == null) {
warn("score_eval_metric_only", "score_eval_metric_only is set but eval_metric parameter is not defined");
}
}
protected void checkCustomMetricForEarlyStopping() {
if (_parms._eval_metric == null && !_parms.hasCustomMetricFunc()) {
error("_eval_metric", "Evaluation metric needs to be defined in order to use it for early stopping.");
super.checkCustomMetricForEarlyStopping();
}
}
private void checkPositiveRate(String paramName, double rateValue) {
if (rateValue <= 0 || rateValue > 1)
error("_" + paramName, paramName + " must be between 0 (exclusive) and 1 (inclusive)");
}
private void checkColumnAlias(String paramName, double paramValue, String aliasName, double aliasValue, double defaultValue) {
if (paramValue != defaultValue && aliasValue != defaultValue && paramValue != aliasValue) {
error("_" + paramName, paramName + " and its alias " + aliasName + " are both set to different value than default value. Set " + aliasName + " to default value (" + defaultValue + "), to use " + paramName + " actual value.");
} else if (aliasValue != defaultValue){
warn("_"+paramName, "Using user-provided parameter "+aliasName+" instead of "+paramName+".\"");
}
}
@Override
protected void checkEarlyStoppingReproducibility() {
if (_parms._score_tree_interval == 0 && !_parms._score_each_iteration) {
warn("_stopping_rounds", "early stopping is enabled but neither score_tree_interval or score_each_iteration are defined. Early stopping will not be reproducible!");
}
}
static boolean allowMultiGPU() {
return H2O.getSysBoolProperty("xgboost.multinode.gpu.enabled", false);
}
static long parallelTrainingSizeLimit() {
long defaultLimit = (long) 1e9; // 1GB; current GPUs typically have at least 8GB of memory - plenty of buffer left
String limitSpec = H2O.getSysProperty("xgboost.gpu.parallelTrainingSizeLimit", Long.toString(defaultLimit));
return Long.parseLong(limitSpec);
}
static boolean prestartExternalClusterForCV() {
return H2O.getSysBoolProperty("xgboost.external.cv.prestart", false);
}
@Override
public XGBoost getModelBuilder() {
return this;
}
@Override
public Frame getCalibrationFrame() {
return _calib;
}
@Override
public void setCalibrationFrame(Frame f) {
_calib = f;
}
@Override
protected boolean canLearnFromNAs() {
return true;
}
static DataInfo makeDataInfo(Frame train, Frame valid, XGBoostModel.XGBoostParameters parms) {
DataInfo dinfo = new DataInfo(
train,
valid,
1, //nResponses
true, //all factor levels
DataInfo.TransformType.NONE, //do not standardize
DataInfo.TransformType.NONE, //do not standardize response
false, //whether to skip missing
false, // do not replace NAs in numeric cols with mean
true, // always add a bucket for missing values
parms._weights_column != null, // observation weights
parms._offset_column != null,
parms._fold_column != null
);
assert !dinfo._predictor_transform.isMeanAdjusted() : "Unexpected predictor transform, it shouldn't be mean adjusted";
assert !dinfo._predictor_transform.isSigmaScaled() : "Unexpected predictor transform, it shouldn't be sigma scaled";
assert !dinfo._response_transform.isMeanAdjusted() : "Unexpected response transform, it shouldn't be mean adjusted";
assert !dinfo._response_transform.isSigmaScaled() : "Unexpected response transform, it shouldn't be sigma scaled";
dinfo.coefNames(); // cache the coefficient names
dinfo.coefOriginalColumnIndices(); // cache the original column indices
assert dinfo._coefNames != null && dinfo._coefOriginalIndices != null;
return dinfo;
}
@Override
protected Frame rebalance(Frame original_fr, boolean local, String name) {
if (original_fr == null) return null;
else if (_parms._build_tree_one_node) {
int original_chunks = original_fr.anyVec().nChunks();
if (original_chunks == 1)
return original_fr;
LOG.info("Rebalancing " + name.substring(name.length()-5) + " dataset onto a single node.");
Key<Frame> newKey = Key.make(name + ".1chk");
Frame singleChunkFr = RebalanceDataSet.toSingleChunk(original_fr, newKey);
Scope.track(singleChunkFr);
return singleChunkFr;
} else {
return super.rebalance(original_fr, local, name);
}
}
// ----------------------
class XGBoostDriver extends Driver {
@Override
public void computeImpl() {
init(true); //this can change the seed if it was set to -1
// Something went wrong during init
if (error_count() > 0)
throw H2OModelBuilderIllegalArgumentException.makeFromBuilder(XGBoost.this);
buildModel();
}
private XGBoostExecutor makeExecutor(XGBoostModel model, boolean useValidFrame) throws IOException {
final Frame valid = useValidFrame ? _valid : null;
if (H2O.ARGS.use_external_xgboost) {
return SteamExecutorStarter.getInstance().getRemoteExecutor(model, _train, valid, _job);
} else {
String remoteUriFromProp = H2O.getSysProperty("xgboost.external.address", null);
if (remoteUriFromProp == null) {
return new LocalXGBoostExecutor(model, _train, valid);
} else {
String userName = H2O.getSysProperty("xgboost.external.user", null);
String password = H2O.getSysProperty("xgboost.external.password", null);
return new RemoteXGBoostExecutor(model, _train, valid, remoteUriFromProp, userName, password);
}
}
}
final void buildModel() {
final XGBoostModel model;
if (_parms.hasCheckpoint()) {
XGBoostModel checkpoint = DKV.get(_parms._checkpoint).<XGBoostModel>get().deepClone(_result);
checkpoint._parms = _parms;
model = checkpoint.delete_and_lock(_job);
} else {
model = new XGBoostModel(_result, _parms, new XGBoostOutput(XGBoost.this), _train, _valid);
model.write_lock(_job);
}
if (_parms._dmatrix_type == XGBoostModel.XGBoostParameters.DMatrixType.sparse) {
model._output._sparse = true;
} else if (_parms._dmatrix_type == XGBoostModel.XGBoostParameters.DMatrixType.dense) {
model._output._sparse = false;
} else {
model._output._sparse = isTrainDatasetSparse();
}
if (model.evalAutoParamsEnabled) {
model.initActualParamValuesAfterOutputSetup(isClassifier(), _nclass);
}
XGBoostUtils.createFeatureMap(model, _train);
XGBoostVariableImportance variableImportance = model.setupVarImp();
boolean scoreValidFrame = _valid != null && _parms._eval_metric != null;
LOG.info("Need to score validation frame by XGBoost native backend: " + scoreValidFrame);
try (XGBoostExecutor exec = makeExecutor(model, scoreValidFrame)) {
model.model_info().updateBoosterBytes(exec.setup());
scoreAndBuildTrees(model, exec, variableImportance);
} catch (Exception e) {
throw new RuntimeException("Error while training XGBoost model", e);
} finally {
variableImportance.cleanup();
// Unlock & save results
model.unlock(_job);
}
}
/**
* @return True if train dataset is sparse, otherwise false.
*/
private boolean isTrainDatasetSparse() {
long nonZeroCount = 0;
int nonCategoricalColumns = 0;
long oneHotEncodedColumns = 0;
for (int i = 0; i < _train.numCols(); ++i) {
if (_train.name(i).equals(_parms._response_column)) continue;
if (_train.name(i).equals(_parms._weights_column)) continue;
if (_train.name(i).equals(_parms._fold_column)) continue;
if (_train.name(i).equals(_parms._offset_column)) continue;
final Vec vector = _train.vec(i);
if (vector.isCategorical()) {
nonZeroCount += _train.numRows();
} else {
nonZeroCount += vector.nzCnt();
}
if (vector.isCategorical()) {
oneHotEncodedColumns += vector.cardinality();
} else {
nonCategoricalColumns++;
}
}
final long totalColumns = oneHotEncodedColumns + nonCategoricalColumns;
final double denominator = (double) totalColumns * _train.numRows();
final double fillRatio = (double) nonZeroCount / denominator;
LOG.info("fill ratio: " + fillRatio);
return fillRatio < FILL_RATIO_THRESHOLD
|| ((_train.numRows() * totalColumns) > Integer.MAX_VALUE);
}
private void scoreAndBuildTrees(final XGBoostModel model, final XGBoostExecutor exec, XGBoostVariableImportance varImp) {
long scoringTime = 0;
for (int tid = 0; tid < _ntrees; tid++) {
if (_job.stop_requested() && tid > 0) break;
// During first iteration model contains 0 trees, then 1-tree, ...
long scoringStart = System.currentTimeMillis();
boolean scored = doScoring(model, exec, varImp, false, _parms._score_eval_metric_only);
scoringTime += System.currentTimeMillis() - scoringStart;
if (scored && ScoreKeeper.stopEarly(model._output.scoreKeepers(), _parms._stopping_rounds, ScoreKeeper.ProblemType.forSupervised(_nclass > 1), _parms._stopping_metric, _parms._stopping_tolerance, "model's last", true)) {
LOG.info("Early stopping triggered - stopping XGBoost training");
LOG.info("Setting actual ntrees to the " + model._output._ntrees);
_parms._ntrees = model._output._ntrees;
break;
}
Timer kb_timer = new Timer();
exec.update(tid);
LOG.info((tid + 1) + ". tree was built in " + kb_timer.toString());
_job.update(1);
model._output._ntrees++;
model._output._scored_train = ArrayUtils.copyAndFillOf(model._output._scored_train, model._output._ntrees+1, new ScoreKeeper());
model._output._scored_valid = model._output._scored_valid != null ? ArrayUtils.copyAndFillOf(model._output._scored_valid, model._output._ntrees+1, new ScoreKeeper()) : null;
model._output._training_time_ms = ArrayUtils.copyAndFillOf(model._output._training_time_ms, model._output._ntrees+1, System.currentTimeMillis());
if (stop_requested() && !timeout()) throw new Job.JobCancelledException(_job);
if (timeout()) {
LOG.info("Stopping XGBoost training because of timeout");
break;
}
}
Map<String, Integer> monotoneConstraints = _parms.monotoneConstraints();
if (!monotoneConstraints.isEmpty() &&
_parms._booster != XGBoostModel.XGBoostParameters.Booster.gblinear &&
monotonicityConstraintCheckEnabled()
) {
_job.update(0, "Checking monotonicity constraints on the final model");
model.model_info().updateBoosterBytes(exec.updateBooster());
checkMonotonicityConstraints(model.model_info(), monotoneConstraints);
}
if (_parms._interaction_constraints != null &&
interactionConstraintCheckEnabled()) {
_job.update(0, "Checking interaction constraints on the final model");
model.model_info().updateBoosterBytes(exec.updateBooster());
checkInteractionConstraints(model.model_info(), _parms._interaction_constraints);
}
_job.update(0, "Scoring the final model");
// Final scoring
long scoringStart = System.currentTimeMillis();
doScoring(model, exec, varImp, true, _parms._score_eval_metric_only);
scoringTime += System.currentTimeMillis() - scoringStart;
// Finish remaining work (if stopped early)
_job.update(_parms._ntrees-model._output._ntrees);
Log.info("In-training scoring took " + scoringTime + "ms.");
}
private boolean monotonicityConstraintCheckEnabled() {
return Boolean.parseBoolean(getSysProperty("xgboost.monotonicity.checkEnabled", "true"));
}
private boolean interactionConstraintCheckEnabled() {
return Boolean.parseBoolean(getSysProperty("xgboost.interactions.checkEnabled", "true"));
}
private void checkMonotonicityConstraints(XGBoostModelInfo model_info, Map<String, Integer> monotoneConstraints) {
GradBooster booster = XGBoostJavaMojoModel.makePredictor(model_info._boosterBytes, null).getBooster();
if (!(booster instanceof GBTree)) {
throw new IllegalStateException("Expected booster object to be GBTree instead it is " + booster.getClass().getName());
}
final RegTree[][] groupedTrees = ((GBTree) booster).getGroupedTrees();
final XGBoostUtils.FeatureProperties featureProperties = XGBoostUtils.assembleFeatureNames(model_info.dataInfo()); // XGBoost's usage of one-hot encoding assumed
for (RegTree[] classTrees : groupedTrees) {
for (RegTree tree : classTrees) {
if (tree == null) continue;
checkMonotonicityConstraints(tree.getNodes(), monotoneConstraints, featureProperties);
}
}
}
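// Verifies a monotone constraint on a single tree: for an increasing constraint, the largest leaf value
// reachable through the left child must not exceed the smallest leaf value reachable through the right child
// (and symmetrically for a decreasing constraint); violations are reported with the offending nodes.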
private void checkMonotonicityConstraints(RegTreeNode[] tree, Map<String, Integer> monotoneConstraints, XGBoostUtils.FeatureProperties featureProperties) {
float[] mins = new float[tree.length];
int[] min_ids = new int[tree.length];
float[] maxs = new float[tree.length];
int[] max_ids = new int[tree.length];
rollupMinMaxPreds(tree, 0, mins, min_ids, maxs, max_ids);
for (RegTreeNode node : tree) {
if (node.isLeaf()) continue;
String splitColumn = featureProperties._names[node.getSplitIndex()];
if (!monotoneConstraints.containsKey(splitColumn)) continue;
int constraint = monotoneConstraints.get(splitColumn);
int left = node.getLeftChildIndex();
int right = node.getRightChildIndex();
if (constraint > 0) {
if (maxs[left] > mins[right]) {
throw new IllegalStateException("Monotonicity constraint " + constraint + " violated on column '" + splitColumn + "' (max(left) > min(right)): " +
maxs[left] + " > " + mins[right] +
"\nNode: " + node +
"\nLeft Node (max): " + tree[max_ids[left]] +
"\nRight Node (min): " + tree[min_ids[right]]);
}
} else if (constraint < 0) {
if (mins[left] < maxs[right]) {
throw new IllegalStateException("Monotonicity constraint " + constraint + " violated on column '" + splitColumn + "' (min(left) < max(right)): " +
mins[left] + " < " + maxs[right] +
"\nNode: " + node +
"\nLeft Node (min): " + tree[min_ids[left]] +
"\nRight Node (max): " + tree[max_ids[right]]);
}
}
}
}
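// Recursively computes, for every node, the minimum and maximum leaf value in its subtree together with the
// indices of the leaves where those extremes occur (used to build informative error messages above).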
private void rollupMinMaxPreds(RegTreeNode[] tree, int nid, float[] mins, int min_ids[], float[] maxs, int[] max_ids) {
RegTreeNode node = tree[nid];
if (node.isLeaf()) {
mins[nid] = node.getLeafValue();
min_ids[nid] = nid;
maxs[nid] = node.getLeafValue();
max_ids[nid] = nid;
return;
}
int left = node.getLeftChildIndex();
int right = node.getRightChildIndex();
rollupMinMaxPreds(tree, left, mins, min_ids, maxs, max_ids);
rollupMinMaxPreds(tree, right, mins, min_ids, maxs, max_ids);
final int min_id = mins[left] < mins[right] ? left : right;
mins[nid] = mins[min_id];
min_ids[nid] = min_ids[min_id];
final int max_id = maxs[left] > maxs[right] ? left : right;
maxs[nid] = maxs[max_id];
max_ids[nid] = max_ids[max_id];
}
private void checkInteractionConstraints(XGBoostModelInfo model_info, String[][] interactionConstraints) {
GradBooster booster = XGBoostJavaMojoModel.makePredictor(model_info._boosterBytes, null).getBooster();
if (!(booster instanceof GBTree)) {
throw new IllegalStateException("Expected booster object to be GBTree instead it is " + booster.getClass().getName());
}
final RegTree[][] groupedTrees = ((GBTree) booster).getGroupedTrees();
final XGBoostUtils.FeatureProperties featureProperties = XGBoostUtils.assembleFeatureNames(model_info.dataInfo()); // XGBoost's usage of one-hot encoding assumed
// create map of constraint unions
Map<Integer, Set<Integer>> interactionUnions = new HashMap<>();
for(String[] interaction : interactionConstraints){
Integer[] mapOfIndices = featureProperties.mapOriginalNamesToIndices(interaction);
for(int index : mapOfIndices){
if(!interactionUnions.containsKey(index)) {
interactionUnions.put(index, new HashSet<>());
}
interactionUnions.get(index).addAll(Arrays.asList(mapOfIndices));
}
}
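// Illustrative example: constraints [[A,B],[B,C]] produce unions {A:{A,B}, B:{A,B,C}, C:{B,C}} keyed by
// original column indices, i.e. each column may interact with any column it shares a constraint list with.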
for (RegTree[] classTrees : groupedTrees) {
for (RegTree tree : classTrees) {
if (tree == null) continue;
RegTreeNode[] treeNodes = tree.getNodes();
checkInteractionConstraints(treeNodes, treeNodes[0], interactionUnions, featureProperties);
}
}
}
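// Walks the tree top-down: every non-leaf child must split either on the same column as its parent or on a
// column from the parent's interaction union, otherwise the interaction constraint is reported as violated.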
private void checkInteractionConstraints(RegTreeNode[] tree, RegTreeNode node, Map<Integer, Set<Integer>> interactionUnions, XGBoostUtils.FeatureProperties featureProperties){
if (node.isLeaf()) {
return;
}
int splitIndex = node.getSplitIndex();
int splitIndexOriginal = featureProperties._originalColumnIndices[splitIndex];
Set<Integer> interactionUnion = interactionUnions.get(splitIndexOriginal);
RegTreeNode leftChildNode = tree[node.getLeftChildIndex()];
// if left child node is not leaf - check left child
if(!leftChildNode.isLeaf()) {
int leftChildSplitIndex = leftChildNode.getSplitIndex();
int leftChildSplitIndexOriginal = featureProperties._originalColumnIndices[leftChildSplitIndex];
// check left child split column is the same as parent or is in parent constrained union - if not violate constraint
if (leftChildSplitIndex != splitIndex && (interactionUnion == null || !interactionUnion.contains(leftChildSplitIndexOriginal))) {
String parentOriginalName = featureProperties._originalNames[splitIndexOriginal];
String interactionString = generateInteractionConstraintUnionString(featureProperties._originalNames, splitIndexOriginal, interactionUnion);
String leftOriginalName = featureProperties._originalNames[leftChildSplitIndexOriginal];
throw new IllegalStateException("Interaction constraint violated on column '" + leftOriginalName+ "': The parent column '"+parentOriginalName+"' can interact only with "+interactionString+" columns.");
}
}
RegTreeNode rightChildNode = tree[node.getRightChildIndex()];
// if right child node is not leaf - check right child
if(!rightChildNode.isLeaf()) {
int rightChildSplitIndex = rightChildNode.getSplitIndex();
int rightChildSplitIndexOriginal = featureProperties._originalColumnIndices[rightChildSplitIndex];
// check right child split column is the same as parent or is in parent constrained union - if not violate constraint
if (rightChildSplitIndex != splitIndex && (interactionUnion == null || !interactionUnion.contains(rightChildSplitIndexOriginal))) {
String parentOriginalName = featureProperties._originalNames[splitIndexOriginal];
String interactionString = generateInteractionConstraintUnionString(featureProperties._originalNames, splitIndexOriginal, interactionUnion);
String rightOriginalName = featureProperties._originalNames[rightChildSplitIndexOriginal];
throw new IllegalStateException("Interaction constraint violated on column '" + rightOriginalName+ "': The parent column '"+parentOriginalName+"' can interact only with "+interactionString+" columns.");
}
}
checkInteractionConstraints(tree, leftChildNode, interactionUnions, featureProperties);
checkInteractionConstraints(tree, rightChildNode, interactionUnions, featureProperties);
}
private String generateInteractionConstraintUnionString(String[] originalNames, int splitIndexOriginal, Set<Integer> interactionUnion){
String parentOriginalName = originalNames[splitIndexOriginal];
String interaction = "['" + parentOriginalName + "']";
if (interactionUnion != null) {
StringBuilder sb = new StringBuilder("[");
for(Integer i: interactionUnion){
sb.append(originalNames[i]).append(",");
}
interaction = sb.replace(sb.length()-1, sb.length(), "]").toString();
}
return interaction;
}
long _firstScore = 0;
long _timeLastScoreStart = 0;
long _timeLastScoreEnd = 0;
private boolean doScoring(final XGBoostModel model, final XGBoostExecutor exec, XGBoostVariableImportance varImp,
boolean finalScoring, boolean scoreEvalMetricOnly) {
boolean scored = false;
long now = System.currentTimeMillis();
if (_firstScore == 0) _firstScore = now;
long sinceLastScore = now - _timeLastScoreStart;
_job.update(0, "Built " + model._output._ntrees + " trees so far (out of " + _parms._ntrees + ").");
boolean timeToScore = (now - _firstScore < _parms._initial_score_interval) || // score every iteration during the initial interval (default 4s)
// Throttle scoring to keep the cost sane; limit to a 10% duty cycle and at most one scoring run per score_interval
(sinceLastScore > _parms._score_interval && // limit scoring updates to once per score_interval (default 4s)
(double) (_timeLastScoreEnd - _timeLastScoreStart) / sinceLastScore < 0.1); // 10% duty cycle
boolean manualInterval = _parms._score_tree_interval > 0 && model._output._ntrees % _parms._score_tree_interval == 0;
// Now model already contains tid-trees in serialized form
if (_parms._score_each_iteration || finalScoring || // always score under these circumstances
(timeToScore && _parms._score_tree_interval == 0) || // use time-based duty-cycle heuristic only if the user didn't specify _score_tree_interval
manualInterval) {
final XGBoostOutput out = model._output;
final boolean boosterUpdated;
_timeLastScoreStart = now;
CustomMetric customMetricTrain = _parms._eval_metric != null ? toCustomMetricTrain(exec.getEvalMetric()) : null;
CustomMetric customMetricValid = _parms._eval_metric != null && _valid != null ? toCustomMetricValid(exec.getEvalMetric()) : null;
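// When only the eval metric is requested (and this is not the final scoring), record the native backend's
// metric directly and skip full H2O scoring - the booster bytes do not need to be transferred in that case.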
if (!finalScoring && scoreEvalMetricOnly && customMetricTrain != null) {
out._scored_train[out._ntrees]._custom_metric = customMetricTrain.value;
if (customMetricValid != null) {
out._useValidForScoreKeeping = true;
out._scored_valid[out._ntrees]._custom_metric = customMetricValid.value;
}
boosterUpdated = false;
} else {
model.model_info().updateBoosterBytes(exec.updateBooster());
boosterUpdated = true;
model.doScoring(_train, _parms.train(), customMetricTrain, _valid, _parms.valid(), customMetricValid);
}
_timeLastScoreEnd = System.currentTimeMillis();
out._model_summary = createModelSummaryTable(out._ntrees, null);
out._scoring_history = createScoringHistoryTable(out, model._output._scored_train, out._scored_valid, _job, out._training_time_ms, _parms._custom_metric_func != null || _parms._eval_metric != null, false);
if (boosterUpdated) {
final Map<String, FeatureScore> varimp = varImp.getFeatureScores(model.model_info()._boosterBytes);
out._varimp = computeVarImp(varimp);
if (out._varimp != null) {
out._variable_importances = createVarImpTable(null, ArrayUtils.toDouble(out._varimp._varimp), out._varimp._names);
out._variable_importances_cover = createVarImpTable("Cover", ArrayUtils.toDouble(out._varimp._covers), out._varimp._names);
out._variable_importances_frequency = createVarImpTable("Frequency", ArrayUtils.toDouble(out._varimp._freqs), out._varimp._names);
}
}
model.update(_job);
LOG.info(model);
scored = true;
}
// Model Calibration (only for the final model, not CV models)
if (finalScoring && _parms.calibrateModel() && (!_parms._is_cv_model)) {
model._output.setCalibrationModel(
CalibrationHelper.buildCalibrationModel(XGBoost.this, _parms, _job, model)
);
model.update(_job);
}
return scored;
}
}
static CustomMetric toCustomMetricTrain(EvalMetric evalMetric) {
return toCustomMetric(evalMetric, true);
}
static CustomMetric toCustomMetricValid(EvalMetric evalMetric) {
return toCustomMetric(evalMetric, false);
}
private static CustomMetric toCustomMetric(EvalMetric evalMetric, boolean isTrain) {
if (evalMetric == null) {
return null;
}
return CustomMetric.from(evalMetric._name, isTrain ? evalMetric._trainValue : evalMetric._validValue);
}
private static TwoDimTable createVarImpTable(String name, double[] rel_imp, String[] coef_names) {
return hex.ModelMetrics.calcVarImp(rel_imp, coef_names, "Variable Importances" + (name != null ? " - " + name : ""),
new String[]{"Relative Importance", "Scaled Importance", "Percentage"});
}
private static XgbVarImp computeVarImp(Map<String, FeatureScore> varimp) {
if (varimp.isEmpty())
return null;
float[] gains = new float[varimp.size()];
float[] covers = new float[varimp.size()];
int[] freqs = new int[varimp.size()];
String[] names = new String[varimp.size()];
int j = 0;
for (Map.Entry<String, FeatureScore> it : varimp.entrySet()) {
gains[j] = it.getValue()._gain;
covers[j] = it.getValue()._cover;
freqs[j] = it.getValue()._frequency;
names[j] = it.getKey();
j++;
}
return new XgbVarImp(names, gains, covers, freqs);
}
@Override
protected CVModelBuilder makeCVModelBuilder(ModelBuilder<?, ?, ?>[] modelBuilders, int parallelization) {
if (XGBoostModel.getActualBackend(_parms, false) == XGBoostModel.XGBoostParameters.Backend.gpu && parallelization > 1) {
return new XGBoostGPUCVModelBuilder(_job, modelBuilders, parallelization, _parms._gpu_id);
} else if (H2O.ARGS.use_external_xgboost && prestartExternalClusterForCV()) {
return new XGBoostExternalCVModelBuilder(_job, modelBuilders, parallelization, SteamExecutorStarter.getInstance());
} else {
return super.makeCVModelBuilder(modelBuilders, parallelization);
}
}
@Override public void cv_computeAndSetOptimalParameters(ModelBuilder<XGBoostModel,XGBoostModel.XGBoostParameters,XGBoostOutput>[] cvModelBuilders) {
if( _parms._stopping_rounds == 0 && _parms._max_runtime_secs == 0) return; // No exciting changes to stopping conditions
// Extract stopping conditions from each CV model, and compute the best stopping answer
_parms._stopping_rounds = 0;
setMaxRuntimeSecsForMainModel();
int sum = 0;
for (ModelBuilder mb : cvModelBuilders)
sum += ((XGBoostOutput) DKV.<Model>getGet(mb.dest())._output)._ntrees;
_parms._ntrees = (int)((double)sum/cvModelBuilders.length);
warn("_ntrees", "Setting optimal _ntrees to " + _parms._ntrees + " for cross-validation main model based on early stopping of cross-validation models.");
warn("_stopping_rounds", "Disabling convergence-based early stopping for cross-validation main model.");
warn("_max_runtime_secs", "Disabling maximum allowed runtime for cross-validation main model.");
}
private boolean unsupportedCategoricalEncoding() {
return _parms._categorical_encoding == Model.Parameters.CategoricalEncodingScheme.Enum ||
_parms._categorical_encoding == Model.Parameters.CategoricalEncodingScheme.Eigen;
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/XGBoostExtension.java
|
package hex.tree.xgboost;
import ai.h2o.xgboost4j.java.INativeLibLoader;
import ai.h2o.xgboost4j.java.NativeLibLoader;
import hex.tree.xgboost.util.NativeLibrary;
import hex.tree.xgboost.util.NativeLibraryLoaderChain;
import org.apache.log4j.Logger;
import water.AbstractH2OExtension;
import water.H2O;
import java.io.IOException;
import java.util.Arrays;
/**
* XGBoost Extension
*
* This is responsible for early initialization of
* XGBoost per cluster node. The registration
 * of XGBoost REST API requires this extension
* to be enabled.
*/
public class XGBoostExtension extends AbstractH2OExtension {
private static final Logger LOG = Logger.getLogger(XGBoostExtension.class);
private static final String XGBOOST_MIN_REQUIREMENTS =
"Xgboost (enabled GPUs) needs: \n"
+ " - CUDA 8.0\n"
+ "XGboost (minimal version) needs: \n"
+ " - GCC 4.7+\n"
+ "Apple silicon is not supported yet\n"
+ "For more details, run in debug mode: `java -Dlog4j.configuration=file:///tmp/log4j.properties -jar h2o.jar`\n";
// XGBoost initialization sequence was called flag
private boolean isInitCalled = false;
// XGBoost binary presence on the system
private boolean isXgboostPresent = false;
private NativeLibInfo nativeLibInfo = null;
public static String NAME = "XGBoost";
@Override
public String getExtensionName() {
return NAME;
}
@Override
public boolean isEnabled() {
// Check if some native library was loaded
if (!isInitCalled) {
synchronized (this) {
if (!isInitCalled) {
isXgboostPresent = initXgboost();
isInitCalled = true;
}
}
}
return isXgboostPresent;
}
public void logNativeLibInfo() {
if (nativeLibInfo == null) {
LOG.warn("No native XGBoost library found.");
return;
}
LOG.info("Found XGBoost backend with library: " + nativeLibInfo.name);
if (nativeLibInfo.flags.length == 0) {
LOG.warn("Your system supports only minimal version of XGBoost (no GPUs, no multithreading)!");
} else {
LOG.info("XGBoost supported backends: " + Arrays.toString(nativeLibInfo.flags));
}
}
public static NativeLibraryLoaderChain getLoader() throws IOException {
INativeLibLoader loader = NativeLibLoader.getLoader();
if (! (loader instanceof NativeLibraryLoaderChain)) {
LOG.warn("Unexpected XGBoost library loader found. Custom loaders are not supported in this version. " +
"XGBoost extension will be disabled.");
return null;
}
return(NativeLibraryLoaderChain) loader;
}
@Override
public void onLocalNodeStarted() {
if (!isEnabled())
return;
final double ratio = H2O.ARGS.off_heap_memory_ratio;
if (H2O.ARGS.off_heap_memory_ratio > 0) {
MemoryCheck.Report report = MemoryCheck.runCheck(ratio);
if (!report.isOffHeapRequirementMet()) {
LOG.warn("There doesn't seem to be enough memory available for XGBoost model training (off_heap_memory_ratio=" + ratio + "), " +
"training XGBoost models is not advised. Details: " + report);
}
}
}
private boolean initXgboost() {
try {
NativeLibraryLoaderChain chainLoader = getLoader();
if (chainLoader == null)
return false;
NativeLibrary lib = chainLoader.getLoadedLibrary();
nativeLibInfo = new NativeLibInfo(lib);
return true;
} catch (IOException e) {
// Oops, no library was loaded or the load failed
LOG.debug("Cause of the xgboost unsuccessful load", e);
LOG.warn("Cannot initialize XGBoost backend! " + XGBOOST_MIN_REQUIREMENTS);
return false;
}
}
private static class NativeLibInfo {
String name;
NativeLibrary.CompilationFlags[] flags;
private NativeLibInfo(NativeLibrary nl) {
name = nl.getName();
flags = nl.getCompilationFlags();
}
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/XGBoostExtensionCheck.java
|
package hex.tree.xgboost;
import water.ExtensionManager;
import water.MRTask;
public class XGBoostExtensionCheck extends MRTask<XGBoostExtensionCheck> {
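// Distributed check that the XGBoost core extension is enabled on every node of the cluster;
// reduce() combines the per-node results with a logical AND.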
boolean enabled;
@Override
protected void setupLocal() {
super.setupLocal();
enabled = ExtensionManager.getInstance().isCoreExtensionEnabled(XGBoostExtension.NAME);
}
@Override
public void reduce(XGBoostExtensionCheck mrt) {
super.reduce(mrt);
enabled = enabled && mrt.enabled;
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/XGBoostExternalCVModelBuilder.java
|
package hex.tree.xgboost;
import hex.CVModelBuilder;
import hex.ModelBuilder;
import hex.tree.xgboost.remote.SteamExecutorStarter;
import org.apache.log4j.Logger;
import water.Job;
import java.io.IOException;
public class XGBoostExternalCVModelBuilder extends CVModelBuilder {
private static final Logger LOG = Logger.getLogger(XGBoostExternalCVModelBuilder.class);
private final SteamExecutorStarter _starter;
private boolean _initialized;
public XGBoostExternalCVModelBuilder(
Job job,
ModelBuilder<?, ?, ?>[] modelBuilders,
int parallelization,
SteamExecutorStarter starter
) {
super(job, modelBuilders, parallelization);
_starter = starter;
}
@Override
protected void prepare(ModelBuilder<?, ?, ?> m) {
if (!_initialized) {
XGBoost xgb = (XGBoost) m;
// We try to start the cluster just once before the CV models are executed in parallel. This way
// the CV models don't need to compete over who starts the cluster - the flow is more
// predictable and easier to debug.
try {
prepareCluster(xgb);
} catch (Exception e) { // ignore, give another chance to start in CV models (same as before this change)
LOG.error("Failed to prepare an external XGBoost cluster, " +
"individual CV models will attempt to start the cluster again.", e);
}
_initialized = true;
}
}
@SuppressWarnings("unchecked")
void prepareCluster(XGBoost xgb) {
LOG.info("Requesting external cluster for model " + xgb.dest());
try {
_starter.startCluster(xgb.dest(), getJob());
} catch (IOException e) {
throw new IllegalStateException("Failed to start external XGBoost cluster", e);
}
LOG.info("External cluster successfully initialized");
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/XGBoostGPUCVModelBuilder.java
|
package hex.tree.xgboost;
import hex.CVModelBuilder;
import hex.ModelBuilder;
import hex.tree.xgboost.util.GpuUtils;
import org.apache.log4j.Logger;
import water.Job;
import java.util.*;
public class XGBoostGPUCVModelBuilder extends CVModelBuilder {
private static final Logger LOG = Logger.getLogger(XGBoostGPUCVModelBuilder.class);
private final GPUAllocator _allocator;
public XGBoostGPUCVModelBuilder(
Job<?> job,
ModelBuilder<?, ?, ?>[] modelBuilders,
int parallelization,
int[] gpuIds
) {
super(job, modelBuilders, parallelization);
final List<Integer> availableGpus;
if (gpuIds != null && gpuIds.length > 0) {
availableGpus = new LinkedList<>();
for (int id : gpuIds) availableGpus.add(id);
} else {
availableGpus = new LinkedList<>(GpuUtils.allGPUs());
}
LOG.info("Available #GPUs for CV model training: " + availableGpus.size());
_allocator = new GPUAllocator(availableGpus);
}
@Override
protected void prepare(ModelBuilder<?, ?, ?> m) {
XGBoost xgb = (XGBoost) m;
xgb._parms._gpu_id = new int[] { _allocator.takeLeastUtilizedGPU() };
LOG.info("Building " + xgb.dest() + " on GPU " + xgb._parms._gpu_id[0]);
}
@Override
protected void finished(ModelBuilder<?, ?, ?> m) {
XGBoost xgb = (XGBoost) m;
_allocator.releaseGPU(xgb._parms._gpu_id[0]);
}
static class GPUAllocator {
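// Tracks how many CV models are currently training on each GPU; a utilization of -1 marks GPU ids that are
// not available. takeLeastUtilizedGPU() picks an available GPU with the fewest active models.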
final int[] _gpu_utilization;
GPUAllocator(List<Integer> gpuIds) {
this(initUtilization(gpuIds));
}
GPUAllocator(int[] gpuUtilization) {
_gpu_utilization = gpuUtilization;
}
static int[] initUtilization(List<Integer> gpus) {
final int maxGpuId = gpus.stream().max(Integer::compareTo)
.orElseThrow(() -> new IllegalStateException("There are no GPUs available for XGBoost (" + gpus + ")."));
final int[] utilization = new int[maxGpuId + 1];
Arrays.fill(utilization, -1);
gpus.forEach(id -> utilization[id] = 0);
return utilization;
}
void releaseGPU(int id) {
_gpu_utilization[id]--;
}
int takeLeastUtilizedGPU() {
int id = -1;
for (int i = 0; i < _gpu_utilization.length; i++) {
if (_gpu_utilization[i] == -1)
continue;
if ((id == -1) || (_gpu_utilization[i] < _gpu_utilization[id])) {
id = i;
}
}
assert id != -1;
_gpu_utilization[id]++;
return id;
}
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/XGBoostModel.java
|
package hex.tree.xgboost;
import biz.k11i.xgboost.Predictor;
import biz.k11i.xgboost.gbm.GBTree;
import biz.k11i.xgboost.gbm.GradBooster;
import biz.k11i.xgboost.tree.RegTree;
import biz.k11i.xgboost.tree.RegTreeNode;
import biz.k11i.xgboost.tree.RegTreeNodeStat;
import hex.*;
import hex.genmodel.algos.tree.*;
import hex.genmodel.algos.xgboost.XGBoostJavaMojoModel;
import hex.genmodel.algos.xgboost.XGBoostMojoModel;
import hex.genmodel.utils.DistributionFamily;
import hex.tree.FriedmanPopescusH;
import hex.tree.CalibrationHelper;
import hex.tree.xgboost.predict.*;
import hex.tree.xgboost.util.PredictConfiguration;
import hex.util.EffectiveParametersUtils;
import org.apache.log4j.Logger;
import water.*;
import water.codegen.CodeGeneratorPipeline;
import water.fvec.Frame;
import water.fvec.Vec;
import water.udf.CFuncRef;
import water.util.*;
import java.util.*;
import java.util.function.Consumer;
import java.util.stream.Stream;
import static hex.genmodel.algos.xgboost.XGBoostMojoModel.ObjectiveType;
import static hex.tree.xgboost.XGBoost.makeDataInfo;
import static hex.tree.xgboost.util.GpuUtils.hasGPU;
import static water.H2O.OptArgs.SYSTEM_PROP_PREFIX;
public class XGBoostModel extends Model<XGBoostModel, XGBoostModel.XGBoostParameters, XGBoostOutput>
implements SharedTreeGraphConverter, Model.LeafNodeAssignment, Model.Contributions, FeatureInteractionsCollector, Model.UpdateAuxTreeWeights, FriedmanPopescusHCollector {
private static final Logger LOG = Logger.getLogger(XGBoostModel.class);
private static final String PROP_VERBOSITY = H2O.OptArgs.SYSTEM_PROP_PREFIX + "xgboost.verbosity";
private static final String PROP_NTHREAD = SYSTEM_PROP_PREFIX + "xgboost.nthreadMax";
private XGBoostModelInfo model_info;
public XGBoostModelInfo model_info() { return model_info; }
public static class XGBoostParameters extends Model.Parameters implements Model.GetNTrees, CalibrationHelper.ParamsWithCalibration {
public enum TreeMethod {
auto, exact, approx, hist
}
public enum GrowPolicy {
depthwise, lossguide
}
public enum Booster {
gbtree, gblinear, dart
}
public enum DartSampleType {
uniform, weighted
}
public enum DartNormalizeType {
tree, forest
}
public enum DMatrixType {
auto, dense, sparse
}
public enum Backend {
auto, gpu, cpu
}
public enum FeatureSelector {
cyclic, shuffle, random, greedy, thrifty
}
public enum Updater {
gpu_hist, shotgun, coord_descent, gpu_coord_descent,
}
// H2O GBM options
public boolean _quiet_mode = true;
public int _ntrees = 50; // Number of trees in the final model. Grid Search, comma sep values:50,100,150,200
/**
* @deprecated will be removed in 3.30.0.1, use _ntrees
*/
public int _n_estimators; // This doesn't seem to be used anywhere... (not in clients)
public int _max_depth = 6; // Maximum tree depth. Grid Search, comma sep values:5,7
public double _min_rows = 1;
public double _min_child_weight = 1;
public double _learn_rate = 0.3;
public double _eta = 0.3;
public double _learn_rate_annealing = 1;
public double _sample_rate = 1.0;
public double _subsample = 1.0;
public double _col_sample_rate = 1.0;
public double _colsample_bylevel = 1.0;
public double _colsample_bynode = 1.0;
public double _col_sample_rate_per_tree = 1.0; //fraction of columns to sample for each tree
public double _colsample_bytree = 1.0;
public KeyValue[] _monotone_constraints;
public String[][] _interaction_constraints;
public float _max_abs_leafnode_pred = 0;
public float _max_delta_step = 0;
public int _score_tree_interval = 0; // score every so many trees (no matter what)
public int _initial_score_interval = 4000; // replaces the previously hard-coded 4000 ms window during which every iteration is scored
public int _score_interval = 4000; // replaces the previously hard-coded 4000 ms minimum interval between scoring runs
public float _min_split_improvement = 0;
public float _gamma;
// Runtime options
public int _nthread = -1;
public String _save_matrix_directory; // dump the xgboost matrix to this directory
public boolean _build_tree_one_node = false; // force to run on single node
// LightGBM specific (only for grow_policy == lossguide)
public int _max_bins = 256;
public int _max_leaves = 0;
// XGBoost specific options
public TreeMethod _tree_method = TreeMethod.auto;
public GrowPolicy _grow_policy = GrowPolicy.depthwise;
public Booster _booster = Booster.gbtree;
public DMatrixType _dmatrix_type = DMatrixType.auto;
public float _reg_lambda = 1;
public float _reg_alpha = 0;
public float _scale_pos_weight = 1;
// Platt scaling (by default)
public boolean _calibrate_model;
public Key<Frame> _calibration_frame;
public CalibrationHelper.CalibrationMethod _calibration_method = CalibrationHelper.CalibrationMethod.AUTO;
// Dart specific (booster == dart)
public DartSampleType _sample_type = DartSampleType.uniform;
public DartNormalizeType _normalize_type = DartNormalizeType.tree;
public float _rate_drop = 0;
public boolean _one_drop = false;
public float _skip_drop = 0;
public int[] _gpu_id; // which GPU to use
public Backend _backend = Backend.auto;
// GBLiner specific (booster == gblinear)
// lambda, alpha support also for gbtree
public FeatureSelector _feature_selector = FeatureSelector.cyclic;
public int _top_k;
public Updater _updater;
public String _eval_metric;
public boolean _score_eval_metric_only;
public String algoName() { return "XGBoost"; }
public String fullName() { return "XGBoost"; }
public String javaName() { return XGBoostModel.class.getName(); }
@Override
public long progressUnits() {
return _ntrees;
}
/**
* Finds parameter settings that are not available on GPU backend.
* In this case the CPU backend should be used instead of GPU.
* @return map of parameter name -> parameter value
*/
Map<String, Object> gpuIncompatibleParams() {
Map<String, Object> incompat = new HashMap<>();
if (!(TreeMethod.auto == _tree_method || TreeMethod.hist == _tree_method) && Booster.gblinear != _booster) {
incompat.put("tree_method", "Only auto and hist are supported tree_method on GPU backend.");
}
if (_max_depth > 15 || _max_depth < 1) {
incompat.put("max_depth", _max_depth + " . Max depth must be greater than 0 and lower than 16 for GPU backend.");
}
if (_grow_policy == GrowPolicy.lossguide)
incompat.put("grow_policy", GrowPolicy.lossguide); // See PUBDEV-5302 (param.grow_policy != TrainParam::kLossGuide Loss guided growth policy not supported. Use CPU algorithm.)
return incompat;
}
Map<String, Integer> monotoneConstraints() {
if (_monotone_constraints == null || _monotone_constraints.length == 0) {
return Collections.emptyMap();
}
Map<String, Integer> constraints = new HashMap<>(_monotone_constraints.length);
for (KeyValue constraint : _monotone_constraints) {
final double val = constraint.getValue();
if (val == 0) {
continue;
}
if (constraints.containsKey(constraint.getKey())) {
throw new IllegalStateException("Duplicate definition of constraint for feature '" + constraint.getKey() + "'.");
}
final int direction = val < 0 ? -1 : 1;
constraints.put(constraint.getKey(), direction);
}
return constraints;
}
@Override
public int getNTrees() {
return _ntrees;
}
@Override
public Frame getCalibrationFrame() {
return _calibration_frame != null ? _calibration_frame.get() : null;
}
@Override
public boolean calibrateModel() {
return _calibrate_model;
}
@Override
public CalibrationHelper.CalibrationMethod getCalibrationMethod() {
return _calibration_method;
}
@Override
public void setCalibrationMethod(CalibrationHelper.CalibrationMethod calibrationMethod) {
_calibration_method = calibrationMethod;
}
@Override
public Parameters getParams() {
return this;
}
static String[] CHECKPOINT_NON_MODIFIABLE_FIELDS = {
"_tree_method", "_grow_policy", "_booster", "_sample_rate", "_max_depth", "_min_rows"
};
}
@Override
public ModelMetrics.MetricBuilder makeMetricBuilder(String[] domain) {
switch(_output.getModelCategory()) {
case Binomial: return new ModelMetricsBinomial.MetricBuilderBinomial(domain);
case Multinomial: return new ModelMetricsMultinomial.MetricBuilderMultinomial(_output.nclasses(), domain, _parms._auc_type);
case Regression: return new ModelMetricsRegression.MetricBuilderRegression();
default: throw H2O.unimpl();
}
}
public XGBoostModel(Key<XGBoostModel> selfKey, XGBoostParameters parms, XGBoostOutput output, Frame train, Frame valid) {
super(selfKey,parms,output);
final DataInfo dinfo = makeDataInfo(train, valid, _parms);
DKV.put(dinfo);
setDataInfoToOutput(dinfo);
model_info = new XGBoostModelInfo(parms, dinfo);
}
@Override
public void initActualParamValues() {
super.initActualParamValues();
EffectiveParametersUtils.initFoldAssignment(_parms);
_parms._backend = getActualBackend(_parms, true);
_parms._tree_method = getActualTreeMethod(_parms);
EffectiveParametersUtils.initCalibrationMethod(_parms);
}
public static XGBoostParameters.TreeMethod getActualTreeMethod(XGBoostParameters p) {
// tree_method parameter is evaluated according to:
// https://github.com/h2oai/xgboost/blob/96f61fb3be8c4fa0e160dd6e82677dfd96a5a9a1/src/gbm/gbtree.cc#L127
// + we don't use external-memory data matrix feature in h2o
// + https://github.com/h2oai/h2o-3/blob/b68e544d8dac3c5c0ed16759e6bf7e8288573ab5/h2o-extensions/xgboost/src/main/java/hex/tree/xgboost/XGBoostModel.java#L348
if ( p._tree_method == XGBoostModel.XGBoostParameters.TreeMethod.auto) {
if (p._backend == XGBoostParameters.Backend.gpu) {
return XGBoostParameters.TreeMethod.hist;
} else if (H2O.getCloudSize() > 1) {
if (p._monotone_constraints != null && p._booster != XGBoostParameters.Booster.gblinear && p._backend != XGBoostParameters.Backend.gpu) {
return XGBoostParameters.TreeMethod.hist;
} else {
return XGBoostModel.XGBoostParameters.TreeMethod.approx;
}
} else if (p.train() != null && p.train().numRows() >= (4 << 20)) {
return XGBoostModel.XGBoostParameters.TreeMethod.approx;
} else {
return XGBoostModel.XGBoostParameters.TreeMethod.exact;
}
} else {
return p._tree_method;
}
}
public void initActualParamValuesAfterOutputSetup(boolean isClassifier, int nclasses) {
EffectiveParametersUtils.initStoppingMetric(_parms, isClassifier);
EffectiveParametersUtils.initCategoricalEncoding(_parms, Parameters.CategoricalEncodingScheme.OneHotInternal);
EffectiveParametersUtils.initDistribution(_parms, nclasses);
_parms._dmatrix_type = _output._sparse ? XGBoostModel.XGBoostParameters.DMatrixType.sparse : XGBoostModel.XGBoostParameters.DMatrixType.dense;
}
public static XGBoostParameters.Backend getActualBackend(XGBoostParameters p, boolean verbose) {
Consumer<String> log = verbose ? LOG::info : LOG::debug;
if ( p._backend == XGBoostParameters.Backend.auto || p._backend == XGBoostParameters.Backend.gpu ) {
if (H2O.getCloudSize() > 1 && !p._build_tree_one_node && !XGBoost.allowMultiGPU()) {
log.accept("GPU backend not supported in distributed mode. Using CPU backend.");
return XGBoostParameters.Backend.cpu;
} else if (! p.gpuIncompatibleParams().isEmpty()) {
log.accept("GPU backend not supported for the choice of parameters (" + p.gpuIncompatibleParams() + "). Using CPU backend.");
return XGBoostParameters.Backend.cpu;
} else if (hasGPU(H2O.CLOUD.members()[0], p._gpu_id)) {
log.accept("Using GPU backend (gpu_id: " + Arrays.toString(p._gpu_id) + ").");
return XGBoostParameters.Backend.gpu;
} else {
log.accept("No GPU (gpu_id: " + Arrays.toString(p._gpu_id) + ") found. Using CPU backend.");
return XGBoostParameters.Backend.cpu;
}
} else {
log.accept("Using CPU backend.");
return XGBoostParameters.Backend.cpu;
}
}
public static Map<String, Object> createParamsMap(XGBoostParameters p, int nClasses, String[] coefNames) {
Map<String, Object> params = new HashMap<>();
// Common parameters with H2O GBM
if (p._n_estimators != 0) {
LOG.info("Using user-provided parameter n_estimators instead of ntrees.");
params.put("nround", p._n_estimators);
p._ntrees = p._n_estimators;
} else {
params.put("nround", p._ntrees);
p._n_estimators = p._ntrees;
}
if (p._eta != 0.3) {
params.put("eta", p._eta);
p._learn_rate = p._eta;
} else {
params.put("eta", p._learn_rate);
p._eta = p._learn_rate;
}
params.put("max_depth", p._max_depth);
if (System.getProperty(PROP_VERBOSITY) != null) {
params.put("verbosity", System.getProperty(PROP_VERBOSITY));
} else {
params.put("silent", p._quiet_mode);
}
if (p._subsample != 1.0) {
params.put("subsample", p._subsample);
p._sample_rate = p._subsample;
} else {
params.put("subsample", p._sample_rate);
p._subsample = p._sample_rate;
}
if (p._colsample_bytree != 1.0) {
params.put("colsample_bytree", p._colsample_bytree);
p._col_sample_rate_per_tree = p._colsample_bytree;
} else {
params.put("colsample_bytree", p._col_sample_rate_per_tree);
p._colsample_bytree = p._col_sample_rate_per_tree;
}
if (p._colsample_bylevel != 1.0) {
params.put("colsample_bylevel", p._colsample_bylevel);
p._col_sample_rate = p._colsample_bylevel;
} else {
params.put("colsample_bylevel", p._col_sample_rate);
p._colsample_bylevel = p._col_sample_rate;
}
if (p._colsample_bynode != 1.0) {
params.put("colsample_bynode", p._colsample_bynode);
}
if (p._max_delta_step != 0) {
params.put("max_delta_step", p._max_delta_step);
p._max_abs_leafnode_pred = p._max_delta_step;
} else {
params.put("max_delta_step", p._max_abs_leafnode_pred);
p._max_delta_step = p._max_abs_leafnode_pred;
}
params.put("seed", (int)(p._seed % Integer.MAX_VALUE));
// XGBoost specific options
params.put("grow_policy", p._grow_policy.toString());
if (p._grow_policy == XGBoostParameters.GrowPolicy.lossguide) {
params.put("max_bin", p._max_bins);
params.put("max_leaves", p._max_leaves);
}
params.put("booster", p._booster.toString());
if (p._booster == XGBoostParameters.Booster.dart) {
params.put("sample_type", p._sample_type.toString());
params.put("normalize_type", p._normalize_type.toString());
params.put("rate_drop", p._rate_drop);
params.put("one_drop", p._one_drop ? "1" : "0");
params.put("skip_drop", p._skip_drop);
}
if (p._booster == XGBoostParameters.Booster.gblinear) {
params.put("feature_selector", p._feature_selector.toString());
params.put("top_k", p._top_k);
}
XGBoostParameters.Backend actualBackend = getActualBackend(p, true);
XGBoostParameters.TreeMethod actualTreeMethod = getActualTreeMethod(p);
if (actualBackend == XGBoostParameters.Backend.gpu) {
if (p._gpu_id != null && p._gpu_id.length > 0) {
params.put("gpu_id", p._gpu_id[0]);
} else {
params.put("gpu_id", 0);
}
// we are setting updater rather than tree_method here to keep CPU predictor, which is faster
if (p._booster == XGBoostParameters.Booster.gblinear && p._updater == null) {
LOG.info("Using gpu_coord_descent updater.");
params.put("updater", XGBoostParameters.Updater.gpu_coord_descent.toString());
} else {
LOG.info("Using gpu_hist tree method.");
params.put("max_bin", p._max_bins);
params.put("tree_method", XGBoostParameters.Updater.gpu_hist.toString());
}
} else if (p._booster == XGBoostParameters.Booster.gblinear && p._updater == null) {
LOG.info("Using coord_descent updater.");
params.put("updater", XGBoostParameters.Updater.coord_descent.toString());
} else if (H2O.CLOUD.size() > 1 && p._tree_method == XGBoostParameters.TreeMethod.auto &&
p._monotone_constraints != null) {
LOG.info("Using hist tree method for distributed computation with monotone_constraints.");
params.put("tree_method", actualTreeMethod.toString());
params.put("max_bin", p._max_bins);
} else {
LOG.info("Using " + p._tree_method.toString() + " tree method.");
params.put("tree_method", actualTreeMethod.toString());
if (p._tree_method == XGBoostParameters.TreeMethod.hist) {
params.put("max_bin", p._max_bins);
}
}
if (p._updater != null) {
LOG.info("Using user-provided updater.");
params.put("updater", p._updater.toString());
}
if (p._min_child_weight != 1) {
LOG.info("Using user-provided parameter min_child_weight instead of min_rows.");
params.put("min_child_weight", p._min_child_weight);
p._min_rows = p._min_child_weight;
} else {
params.put("min_child_weight", p._min_rows);
p._min_child_weight = p._min_rows;
}
if (p._gamma != 0) {
LOG.info("Using user-provided parameter gamma instead of min_split_improvement.");
params.put("gamma", p._gamma);
p._min_split_improvement = p._gamma;
} else {
params.put("gamma", p._min_split_improvement);
p._gamma = p._min_split_improvement;
}
params.put("lambda", p._reg_lambda);
params.put("alpha", p._reg_alpha);
if (p._scale_pos_weight != 1)
params.put("scale_pos_weight", p._scale_pos_weight);
// objective function
if (nClasses==2) {
params.put("objective", ObjectiveType.BINARY_LOGISTIC.getId());
} else if (nClasses==1) {
if (p._distribution == DistributionFamily.gamma) {
params.put("objective", ObjectiveType.REG_GAMMA.getId());
} else if (p._distribution == DistributionFamily.tweedie) {
params.put("objective", ObjectiveType.REG_TWEEDIE.getId());
params.put("tweedie_variance_power", p._tweedie_power);
} else if (p._distribution == DistributionFamily.poisson) {
params.put("objective", ObjectiveType.COUNT_POISSON.getId());
} else if (p._distribution == DistributionFamily.gaussian || p._distribution == DistributionFamily.AUTO) {
params.put("objective", ObjectiveType.REG_SQUAREDERROR.getId());
} else {
throw new UnsupportedOperationException("No support for distribution=" + p._distribution.toString());
}
} else {
params.put("objective", ObjectiveType.MULTI_SOFTPROB.getId());
params.put("num_class", nClasses);
}
assert ObjectiveType.fromXGBoost((String) params.get("objective")) != null;
// evaluation metric
if (p._eval_metric != null) {
params.put("eval_metric", p._eval_metric);
}
final int nthreadMax = getMaxNThread();
final int nthread = p._nthread != -1 ? Math.min(p._nthread, nthreadMax) : nthreadMax;
if (nthread < p._nthread) {
LOG.warn("Requested nthread=" + p._nthread + " but the cluster has only " + nthreadMax + " available." +
"Training will use nthread=" + nthread + " instead of the user specified value.");
}
params.put("nthread", nthread);
Map<String, Integer> monotoneConstraints = p.monotoneConstraints();
if (! monotoneConstraints.isEmpty()) {
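// XGBoost expects one direction per expanded (one-hot) coefficient, encoded as a tuple string, e.g. "(0,1,-1,0)".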
int constraintsUsed = 0;
StringBuilder sb = new StringBuilder();
sb.append("(");
for (String coef : coefNames) {
final String direction;
if (monotoneConstraints.containsKey(coef)) {
direction = monotoneConstraints.get(coef).toString();
constraintsUsed++;
} else {
direction = "0";
}
sb.append(direction);
sb.append(",");
}
sb.replace(sb.length()-1, sb.length(), ")");
params.put("monotone_constraints", sb.toString());
assert constraintsUsed == monotoneConstraints.size();
}
String[][] interactionConstraints = p._interaction_constraints;
if(interactionConstraints != null && interactionConstraints.length > 0) {
if(!p._categorical_encoding.equals(Parameters.CategoricalEncodingScheme.OneHotInternal)){
throw new IllegalArgumentException("No support interaction constraint for categorical encoding = " + p._categorical_encoding.toString()+". Constraint interactions are available only for ``AUTO`` (``one_hot_internal`` or ``OneHotInternal``) categorical encoding.");
}
params.put("interaction_constraints", createInteractions(interactionConstraints, coefNames, p));
}
LOG.info("XGBoost Parameters:");
for (Map.Entry<String,Object> s : params.entrySet()) {
LOG.info(" " + s.getKey() + " = " + s.getValue());
}
LOG.info("");
return Collections.unmodifiableMap(params);
}
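// Translates interaction constraints given as column names into XGBoost's index-based nested-list string
// (e.g. "[[0,1,2],[3,4]]"); one-hot encoded categorical columns are expanded to the full range of their
// coefficient indices.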
private static String createInteractions(String[][] interaction_constraints, String[] coefNames, XGBoostParameters params){
StringBuilder sb = new StringBuilder();
sb.append("[");
for (String[] list : interaction_constraints) {
sb.append("[");
for (String item : list) {
if(item.equals(params._response_column)){
throw new IllegalArgumentException("'interaction_constraints': Column with the name '" + item + "'is used as response column and cannot be used in interaction.");
}
if(item.equals(params._weights_column)){
throw new IllegalArgumentException("'interaction_constraints': Column with the name '" + item + "'is used as weights column and cannot be used in interaction.");
}
if(item.equals(params._fold_column)){
throw new IllegalArgumentException("'interaction_constraints': Column with the name '" + item + "'is used as fold column and cannot be used in interaction.");
}
if(params._ignored_columns != null && ArrayUtils.find(params._ignored_columns, item) != -1) {
throw new IllegalArgumentException("'interaction_constraints': Column with the name '" + item + "'is set in ignored columns and cannot be used in interaction.");
}
// first try to find the exact column name
int start = ArrayUtils.findWithPrefix(coefNames, item);
// find the start index and add indices up to the end index
if (start == -1) {
throw new IllegalArgumentException("'interaction_constraints': Column with name '" + item + "' is not in the frame.");
} else if(start > -1){ // find exact position - no encoding
sb.append(start).append(",");
} else { // found the first occurrence of the name used as a prefix - encoded column
start = -start - 2;
assert coefNames[start].startsWith(item): "The column name should be found correctly.";
// iterate until all encoded indices are added
int end = start;
while (end < coefNames.length && coefNames[end].startsWith(item)) {
sb.append(end).append(",");
end++;
}
}
}
sb.replace(sb.length() - 1, sb.length(), "],");
}
sb.replace(sb.length() - 1, sb.length(), "]");
return sb.toString();
}
public static BoosterParms createParams(XGBoostParameters p, int nClasses, String[] coefNames) {
return BoosterParms.fromMap(createParamsMap(p, nClasses, coefNames));
}
/** Performs deep clone of given model. */
protected XGBoostModel deepClone(Key<XGBoostModel> result) {
XGBoostModel newModel = IcedUtils.deepCopy(this);
newModel._key = result;
// Do not clone model metrics
newModel._output.clearModelMetrics(false);
newModel._output._training_metrics = null;
newModel._output._validation_metrics = null;
return newModel;
}
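// Upper bound on XGBoost's nthread: either the system property override, or H2O's nthreads divided by the
// largest number of H2O nodes sharing a single host, so co-located JVMs do not oversubscribe the CPU.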
static int getMaxNThread() {
if (System.getProperty(PROP_NTHREAD) != null) {
return Integer.getInteger(PROP_NTHREAD);
} else {
int maxNodesPerHost = 1;
Set<String> checkedNodes = new HashSet<>();
for (H2ONode node : H2O.CLOUD.members()) {
String nodeHost = node.getIp();
if (!checkedNodes.contains(nodeHost)) {
checkedNodes.add(nodeHost);
long cnt = Stream.of(H2O.CLOUD.members()).filter(h -> h.getIp().equals(nodeHost)).count();
if (cnt > maxNodesPerHost) {
maxNodesPerHost = (int) cnt;
}
}
}
return Math.max(1, H2O.ARGS.nthreads / maxNodesPerHost);
}
}
@Override protected AutoBuffer writeAll_impl(AutoBuffer ab) {
ab.putKey(model_info.getDataInfoKey());
ab.putKey(model_info.getAuxNodeWeightsKey());
return super.writeAll_impl(ab);
}
@Override protected Keyed readAll_impl(AutoBuffer ab, Futures fs) {
ab.getKey(model_info.getDataInfoKey(), fs);
ab.getKey(model_info.getAuxNodeWeightsKey(), fs);
return super.readAll_impl(ab, fs);
}
@Override
public XGBoostMojoWriter getMojo() {
return new XGBoostMojoWriter(this);
}
private ModelMetrics makeMetrics(Frame data, Frame originalData, boolean isTrain, String description) {
LOG.debug("Making metrics: " + description);
return new XGBoostModelMetrics(_output, data, originalData, isTrain, this, CFuncRef.from(_parms._custom_metric_func)).compute();
}
final void doScoring(Frame train, Frame trainOrig, CustomMetric trainCustomMetric,
Frame valid, Frame validOrig, CustomMetric validCustomMetric) {
ModelMetrics mm = makeMetrics(train, trainOrig, true, "Metrics reported on training frame");
_output._training_metrics = mm;
if (trainCustomMetric == null) {
_output._scored_train[_output._ntrees].fillFrom(mm, mm._custom_metric);
} else {
_output._scored_train[_output._ntrees].fillFrom(mm, trainCustomMetric);
}
addModelMetrics(mm);
// Optional validation part
if (valid != null) {
mm = makeMetrics(valid, validOrig, false, "Metrics reported on validation frame");
_output._validation_metrics = mm;
if (validCustomMetric == null) {
_output._scored_valid[_output._ntrees].fillFrom(mm, mm._custom_metric);
} else {
_output._scored_valid[_output._ntrees].fillFrom(mm, validCustomMetric);
}
addModelMetrics(mm);
}
}
@Override
protected Frame postProcessPredictions(Frame adaptedFrame, Frame predictFr, Job j) {
return CalibrationHelper.postProcessPredictions(predictFr, j, _output);
}
@Override
protected double[] score0(double[] data, double[] preds) {
return score0(data, preds, 0.0);
}
@Override // per row scoring is slow and should be avoided!
public double[] score0(final double[] data, final double[] preds, final double offset) {
final DataInfo di = model_info.dataInfo();
assert di != null;
MutableOneHotEncoderFVec row = new MutableOneHotEncoderFVec(di, _output._sparse);
row.setInput(data);
Predictor predictor = makePredictor(true);
float[] out;
if (_output.hasOffset()) {
out = predictor.predict(row, (float) offset);
} else if (offset != 0) {
throw new UnsupportedOperationException("Unsupported: offset != 0");
} else {
out = predictor.predict(row);
}
return XGBoostMojoModel.toPreds(data, out, preds, _output.nclasses(), _output._priorClassDist, defaultThreshold());
}
@Override
protected XGBoostBigScorePredict setupBigScorePredict(BigScore bs) {
return setupBigScorePredict(false);
}
public XGBoostBigScorePredict setupBigScorePredict(boolean isTrain) {
DataInfo di = model_info().scoringInfo(isTrain); // select the scoring info (training vs. validation) for this scoring pass
return PredictConfiguration.useJavaScoring() ? setupBigScorePredictJava(di) : setupBigScorePredictNative(di);
}
private XGBoostBigScorePredict setupBigScorePredictNative(DataInfo di) {
BoosterParms boosterParms = XGBoostModel.createParams(_parms, _output.nclasses(), di.coefNames());
return new XGBoostNativeBigScorePredict(model_info, _parms, _output, di, boosterParms, defaultThreshold());
}
private XGBoostBigScorePredict setupBigScorePredictJava(DataInfo di) {
return new XGBoostJavaBigScorePredict(model_info, _output, di, _parms, defaultThreshold());
}
public XGBoostVariableImportance setupVarImp() {
if (PredictConfiguration.useJavaScoring()) {
return new XGBoostJavaVariableImportance(model_info);
} else {
return new XGBoostNativeVariableImportance(_key, model_info.getFeatureMap());
}
}
@Override
public Frame scoreContributions(Frame frame, Key<Frame> destination_key) {
return scoreContributions(frame, destination_key, null, new ContributionsOptions());
}
@Override
public Frame scoreContributions(Frame frame, Key<Frame> destination_key, Job<Frame> j, ContributionsOptions options) {
Frame adaptFrm = new Frame(frame);
adaptTestForTrain(adaptFrm, true, false);
DataInfo di = model_info().dataInfo();
assert di != null;
final String[] featureContribNames = ContributionsOutputFormat.Compact.equals(options._outputFormat) ?
_output.features() : di.coefNames();
final String[] outputNames = ArrayUtils.append(featureContribNames, "BiasTerm");
if (options.isSortingRequired()) {
final ContributionComposer contributionComposer = new ContributionComposer();
int topNAdjusted = contributionComposer.checkAndAdjustInput(options._topN, featureContribNames.length);
int bottomNAdjusted = contributionComposer.checkAndAdjustInput(options._bottomN, featureContribNames.length);
int outputSize = Math.min((topNAdjusted+bottomNAdjusted)*2, featureContribNames.length*2);
String[] names = new String[outputSize+1];
byte[] types = new byte[outputSize+1];
String[][] domains = new String[outputSize+1][outputNames.length];
composeScoreContributionTaskMetadata(names, types, domains, featureContribNames, options);
return new PredictTreeSHAPSortingTask(di, model_info(), _output, options)
.withPostMapAction(JobUpdatePostMap.forJob(j))
.doAll(types, adaptFrm)
.outputFrame(destination_key, names, domains);
}
return new PredictTreeSHAPTask(di, model_info(), _output, options)
.withPostMapAction(JobUpdatePostMap.forJob(j))
.doAll(outputNames.length, Vec.T_NUM, adaptFrm)
.outputFrame(destination_key, outputNames, null);
}
@Override
public Frame scoreContributions(Frame frame, Key<Frame> destination_key, Job<Frame> j, ContributionsOptions options, Frame backgroundFrame) {
Log.info("Starting contributions calculation for " + this._key + "...");
try (Scope.Safe s = Scope.safe(frame, backgroundFrame)) {
Frame contributions;
if (null == backgroundFrame) {
contributions = scoreContributions(frame, destination_key, j, options);
} else {
Frame adaptedFrame = adaptFrameForScore(frame, false);
DKV.put(adaptedFrame);
Frame adaptedBgFrame = adaptFrameForScore(backgroundFrame, false);
DKV.put(adaptedBgFrame);
DataInfo di = model_info().dataInfo();
assert di != null;
final String[] featureContribNames = ContributionsOutputFormat.Compact.equals(options._outputFormat) ?
_output.features() : di.coefNames();
final String[] outputNames = ArrayUtils.append(featureContribNames, "BiasTerm");
contributions = new PredictTreeSHAPWithBackgroundTask(di, model_info(), _output, options,
adaptedFrame, adaptedBgFrame, options._outputPerReference, options._outputSpace)
.runAndGetOutput(j, destination_key, outputNames);
}
return Scope.untrack(contributions);
} finally {
Log.info("Finished contributions calculation for " + this._key + "...");
}
}
@Override
public UpdateAuxTreeWeightsReport updateAuxTreeWeights(Frame frame, String weightsColumn) {
if (weightsColumn == null) {
throw new IllegalArgumentException("Weights column name is not defined");
}
Frame adaptFrm = new Frame(frame);
Vec weights = adaptFrm.remove(weightsColumn);
if (weights == null) {
throw new IllegalArgumentException("Input frame doesn't contain weights column `" + weightsColumn + "`");
}
adaptTestForTrain(adaptFrm, true, false);
// keep features only and re-introduce weights column at the end of the frame
Frame featureFrm = new Frame(_output.features(), frame.vecs(_output.features()));
featureFrm.add(weightsColumn, weights);
DataInfo di = model_info().dataInfo();
assert di != null;
double[][] nodeWeights = new UpdateAuxTreeWeightsTask(_parms._distribution, di, model_info(), _output)
.doAll(featureFrm)
.getNodeWeights();
AuxNodeWeights auxNodeWeights = new AuxNodeWeights(model_info().getAuxNodeWeightsKey(), nodeWeights);
DKV.put(auxNodeWeights);
UpdateAuxTreeWeightsReport report = new UpdateAuxTreeWeightsReport();
report._warn_classes = new int[0];
report._warn_trees = new int[0];
for (int treeId = 0; treeId < nodeWeights.length; treeId++) {
if (nodeWeights[treeId] == null)
continue;
for (double w : nodeWeights[treeId]) {
if (w == 0) {
report._warn_trees = ArrayUtils.append(report._warn_trees, treeId);
report._warn_classes = ArrayUtils.append(report._warn_classes, 0);
break;
}
}
}
return report;
}
@Override
public Frame scoreLeafNodeAssignment(
Frame frame, LeafNodeAssignmentType type, Key<Frame> destination_key
) {
AssignLeafNodeTask task = AssignLeafNodeTask.make(model_info.scoringInfo(false), _output, model_info._boosterBytes, type);
Frame adaptFrm = new Frame(frame);
adaptTestForTrain(adaptFrm, true, false);
return task.execute(adaptFrm, destination_key);
}
private void setDataInfoToOutput(DataInfo dinfo) {
_output.setNames(dinfo._adaptedFrame.names(), dinfo._adaptedFrame.typesStr());
_output._domains = dinfo._adaptedFrame.domains();
_output._nums = dinfo._nums;
_output._cats = dinfo._cats;
_output._catOffsets = dinfo._catOffsets;
_output._useAllFactorLevels = dinfo._useAllFactorLevels;
}
@Override
protected Futures remove_impl(Futures fs, boolean cascade) {
DataInfo di = model_info().dataInfo();
if (di != null) {
di.remove(fs);
}
AuxNodeWeights anw = model_info().auxNodeWeights();
if (anw != null) {
anw.remove(fs);
}
if (_output._calib_model != null)
_output._calib_model.remove(fs);
return super.remove_impl(fs, cascade);
}
@Override
public SharedTreeGraph convert(final int treeNumber, final String treeClassName) {
GradBooster booster = XGBoostJavaMojoModel
.makePredictor(model_info._boosterBytes, model_info.auxNodeWeightBytes())
.getBooster();
if (!(booster instanceof GBTree)) {
throw new IllegalArgumentException("XGBoost model is not backed by a tree-based booster. Booster class is " +
booster.getClass().getCanonicalName());
}
final RegTree[][] groupedTrees = ((GBTree) booster).getGroupedTrees();
final int treeClass = getXGBoostClassIndex(treeClassName);
if (treeClass >= groupedTrees.length) {
throw new IllegalArgumentException(String.format("Given XGBoost model does not have given class '%s'.", treeClassName));
}
final RegTree[] treesInGroup = groupedTrees[treeClass];
if (treeNumber >= treesInGroup.length || treeNumber < 0) {
throw new IllegalArgumentException(String.format("There is no such tree number for given class. Total number of trees is %d.", treesInGroup.length));
}
final RegTreeNode[] treeNodes = treesInGroup[treeNumber].getNodes();
final RegTreeNodeStat[] treeNodeStats = treesInGroup[treeNumber].getStats();
assert treeNodes.length >= 1;
SharedTreeGraph sharedTreeGraph = new SharedTreeGraph();
final SharedTreeSubgraph sharedTreeSubgraph = sharedTreeGraph.makeSubgraph(_output._training_metrics._description);
final XGBoostUtils.FeatureProperties featureProperties = XGBoostUtils.assembleFeatureNames(model_info.dataInfo()); // XGBoost's usage of one-hot encoding assumed
constructSubgraph(treeNodes, treeNodeStats, sharedTreeSubgraph.makeRootNode(), 0, sharedTreeSubgraph, featureProperties, true); // Root node is at index 0
return sharedTreeGraph;
}
private static void constructSubgraph(final RegTreeNode[] xgBoostNodes, final RegTreeNodeStat[] xgBoostNodeStats, final SharedTreeNode sharedTreeNode,
final int nodeIndex, final SharedTreeSubgraph sharedTreeSubgraph,
final XGBoostUtils.FeatureProperties featureProperties, boolean inclusiveNA) {
final RegTreeNode xgBoostNode = xgBoostNodes[nodeIndex];
final RegTreeNodeStat xgBoostNodeStat = xgBoostNodeStats[nodeIndex];
// Not testing for NaNs, as SharedTreeNode uses NaNs as default values.
//No domain set, as the structure mimics XGBoost's tree, which is numeric-only
if (featureProperties._oneHotEncoded[xgBoostNode.getSplitIndex()]) {
// Shared tree model uses < to the left and >= to the right. Transforming one-hot encoded categoricals
// from 0 to 1 makes them fit the current split description logic.
sharedTreeNode.setSplitValue(1.0F);
} else {
sharedTreeNode.setSplitValue(xgBoostNode.getSplitCondition());
}
sharedTreeNode.setPredValue(xgBoostNode.getLeafValue());
sharedTreeNode.setInclusiveNa(inclusiveNA);
sharedTreeNode.setNodeNumber(nodeIndex);
sharedTreeNode.setGain(xgBoostNodeStat.getGain());
sharedTreeNode.setWeight(xgBoostNodeStat.getCover());
if (!xgBoostNode.isLeaf()) {
sharedTreeNode.setCol(xgBoostNode.getSplitIndex(), featureProperties._names[xgBoostNode.getSplitIndex()]);
constructSubgraph(xgBoostNodes, xgBoostNodeStats, sharedTreeSubgraph.makeLeftChildNode(sharedTreeNode),
xgBoostNode.getLeftChildIndex(), sharedTreeSubgraph, featureProperties, xgBoostNode.default_left());
constructSubgraph(xgBoostNodes, xgBoostNodeStats, sharedTreeSubgraph.makeRightChildNode(sharedTreeNode),
xgBoostNode.getRightChildIndex(), sharedTreeSubgraph, featureProperties, !xgBoostNode.default_left());
}
}
@Override
public SharedTreeGraph convert(int treeNumber, String treeClass, ConvertTreeOptions options) {
return convert(treeNumber, treeClass); // options are currently not applicable to in-H2O conversion
}
private int getXGBoostClassIndex(final String treeClass) {
final ModelCategory modelCategory = _output.getModelCategory();
if(ModelCategory.Regression.equals(modelCategory) && (treeClass != null && !treeClass.isEmpty())){
throw new IllegalArgumentException("There should be no tree class specified for regression.");
}
if ((treeClass == null || treeClass.isEmpty())) {
// Binomial & regression problems do not require tree class to be specified, as there is only one available.
// Such class is selected automatically for the user.
switch (modelCategory) {
case Binomial:
case Regression:
return 0;
default:
// If the user does not specify a tree class explicitly and there are multiple options to choose from,
// throw an error.
throw new IllegalArgumentException(String.format("Model category '%s' requires tree class to be specified.",
modelCategory));
}
}
final String[] domain = _output._domains[_output._domains.length - 1];
final int treeClassIndex = ArrayUtils.find(domain, treeClass);
if (ModelCategory.Binomial.equals(modelCategory) && treeClassIndex != 0) {
throw new IllegalArgumentException(String.format("For binomial XGBoost model, only one tree for class %s has been built.", domain[0]));
} else if (treeClassIndex < 0) {
throw new IllegalArgumentException(String.format("No such class '%s' in tree.", treeClass));
}
return treeClassIndex;
}
@Override
public boolean isFeatureUsedInPredict(String featureName) {
int featureIdx = ArrayUtils.find(_output._varimp._names, featureName);
if (featureIdx == -1 && _output._catOffsets.length > 1) { // feature is possibly categorical
featureIdx = ArrayUtils.find(_output._names, featureName);
if (featureIdx == -1 || !_output._column_types[featureIdx].equals("Enum")) return false;
for (int i = 0; i < _output._varimp._names.length; i++) {
if (_output._varimp._names[i].startsWith(featureName.concat(".")) && _output._varimp._varimp[i] != 0){
return true;
}
}
return false;
}
return featureIdx != -1 && _output._varimp._varimp[featureIdx] != 0d;
}
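// Descriptive note (sketch, not in the original source): variable importances are reported per one-hot
// level for categorical columns. Assuming a categorical column "color" with levels "red" and "blue",
// _varimp._names would hold entries such as "color.red" and "color.blue"; isFeatureUsedInPredict("color")
// then returns true if any of these per-level importances is non-zero.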
//--------------------------------------------------------------------------------------------------------------------
// Serialization into a POJO
//--------------------------------------------------------------------------------------------------------------------
@Override
protected boolean toJavaCheckTooBig() {
return _output == null || _output._ntrees * _parms._max_depth > 1000;
}
@Override protected SBPrintStream toJavaInit(SBPrintStream sb, CodeGeneratorPipeline fileCtx) {
sb.nl();
sb.ip("public boolean isSupervised() { return true; }").nl();
sb.ip("public int nclasses() { return ").p(_output.nclasses()).p("; }").nl();
return sb;
}
@Override
protected void toJavaPredictBody(
SBPrintStream sb, CodeGeneratorPipeline classCtx, CodeGeneratorPipeline fileCtx, boolean verboseCode
) {
final String namePrefix = JCodeGen.toJavaId(_key.toString());
Predictor p = makePredictor(false);
XGBoostPojoWriter.make(p, namePrefix, _output, defaultThreshold()).renderJavaPredictBody(sb, fileCtx);
}
public FeatureInteractions getFeatureInteractions(int maxInteractionDepth, int maxTreeDepth, int maxDeepening) {
FeatureInteractions featureInteractions = new FeatureInteractions();
for (int i = 0; i < this._parms._ntrees; i++) {
FeatureInteractions currentTreeFeatureInteractions = new FeatureInteractions();
SharedTreeGraph sharedTreeGraph = convert(i, null);
assert sharedTreeGraph.subgraphArray.size() == 1;
SharedTreeSubgraph tree = sharedTreeGraph.subgraphArray.get(0);
List<SharedTreeNode> interactionPath = new ArrayList<>();
Set<String> memo = new HashSet<>();
FeatureInteractions.collectFeatureInteractions(tree.rootNode, interactionPath, 0, 0, 1, 0, 0,
currentTreeFeatureInteractions, memo, maxInteractionDepth, maxTreeDepth, maxDeepening, i, false);
featureInteractions.mergeWith(currentTreeFeatureInteractions);
}
return featureInteractions;
}
@Override
public TwoDimTable[][] getFeatureInteractionsTable(int maxInteractionDepth, int maxTreeDepth, int maxDeepening) {
return FeatureInteractions.getFeatureInteractionsTable(this.getFeatureInteractions(maxInteractionDepth,maxTreeDepth,maxDeepening));
}
Predictor makePredictor(boolean scoringOnly) {
return PredictorFactory.makePredictor(model_info._boosterBytes, model_info.auxNodeWeightBytes(), scoringOnly);
}
protected Frame removeSpecialNNonNumericColumns(Frame frame) {
Frame adaptFrm = new Frame(frame);
adaptTestForTrain(adaptFrm, true, false);
// remove non-feature columns
adaptFrm.remove(_parms._response_column);
adaptFrm.remove(_parms._fold_column);
adaptFrm.remove(_parms._weights_column);
adaptFrm.remove(_parms._offset_column);
// remove non-numeric columns
int numCols = adaptFrm.numCols()-1;
for (int index=numCols; index>=0; index--) {
if (!adaptFrm.vec(index).isNumeric())
adaptFrm.remove(index);
}
return adaptFrm;
}
@Override
public double getFriedmanPopescusH(Frame frame, String[] vars) {
Frame adaptFrm = removeSpecialNNonNumericColumns(frame);
for(int colId = 0; colId < adaptFrm.numCols(); colId++) {
Vec col = adaptFrm.vec(colId);
if (col.isBad()) {
throw new UnsupportedOperationException(
"Calculating of H statistics error: column " + adaptFrm.name(colId) + " is missing.");
}
if(!col.isNumeric()) {
throw new UnsupportedOperationException(
"Calculating of H statistics error: column " + adaptFrm.name(colId) + " is not numeric.");
}
}
int nclasses = this._output.nclasses() > 2 ? this._output.nclasses() : 1;
SharedTreeSubgraph[][] sharedTreeSubgraphs = new SharedTreeSubgraph[this._parms._ntrees][nclasses];
for (int i = 0; i < this._parms._ntrees; i++) {
for (int j = 0; j < nclasses; j++) {
SharedTreeGraph graph = this.convert(i, this._output.classNames()[j]);
assert graph.subgraphArray.size() == 1;
sharedTreeSubgraphs[i][j] = graph.subgraphArray.get(0);
}
}
return FriedmanPopescusH.h(adaptFrm, vars, this._parms._learn_rate, sharedTreeSubgraphs);
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/XGBoostModelInfo.java
|
package hex.tree.xgboost;
import hex.DataInfo;
import hex.tree.xgboost.predict.AuxNodeWeights;
import water.Iced;
import water.Key;
import java.util.Arrays;
final public class XGBoostModelInfo extends Iced {
public final XGBoostModel.XGBoostParameters _parameters; // not used, kept for debugging purposes
private final Key<DataInfo> _dataInfoKey;
private final Key<AuxNodeWeights> _auxNodeWeightsKey;
public String _featureMap;
public byte[] _boosterBytes; // internal state of native backend
/**
* Main constructor
* @param origParams Model parameters
*/
public XGBoostModelInfo(final XGBoostModel.XGBoostParameters origParams, DataInfo dinfo) {
_parameters = (XGBoostModel.XGBoostParameters) origParams.clone(); //make a copy, don't change model's parameters
_dataInfoKey = dinfo._key;
_auxNodeWeightsKey = Key.make();
}
public String getFeatureMap() {
return _featureMap;
}
public void setFeatureMap(String featureMap) {
_featureMap = featureMap;
}
public void updateBoosterBytes(byte[] boosterBytes) {
if (boosterBytes != null) {
_boosterBytes = boosterBytes;
}
}
@Override
public int hashCode() {
return Arrays.hashCode(_boosterBytes);
}
// compute model size (number of model parameters required for making predictions)
// momenta are not counted here, but they are needed for model building
public long size() {
long res = 0;
if (_boosterBytes != null) res += _boosterBytes.length;
return res;
}
public Key<DataInfo> getDataInfoKey() {
return _dataInfoKey;
}
public DataInfo dataInfo() {
return _dataInfoKey.get();
}
public DataInfo scoringInfo(boolean isTrain) {
return isTrain ? dataInfo() : dataInfo().scoringInfo();
}
public Key<AuxNodeWeights> getAuxNodeWeightsKey() {
return _auxNodeWeightsKey;
}
public AuxNodeWeights auxNodeWeights() {
return _auxNodeWeightsKey.get();
}
public byte[] auxNodeWeightBytes() {
AuxNodeWeights anw = _auxNodeWeightsKey.get();
return anw != null ? anw._nodeWeightBytes : null;
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/XGBoostMojoWriter.java
|
package hex.tree.xgboost;
import hex.Model;
import hex.ModelMojoWriter;
import hex.glm.GLMModel;
import hex.isotonic.IsotonicRegressionModel;
import hex.tree.CalibrationHelper;
import java.io.IOException;
import java.nio.charset.Charset;
/**
* MOJO support for XGBoost model.
*/
public class XGBoostMojoWriter extends ModelMojoWriter<XGBoostModel, XGBoostModel.XGBoostParameters, XGBoostOutput> {
@SuppressWarnings("unused") // Called through reflection in ModelBuildersHandler
public XGBoostMojoWriter() {}
public XGBoostMojoWriter(XGBoostModel model) {
super(model);
}
@Override public String mojoVersion() {
return "1.10";
}
@Override
protected void writeModelData() throws IOException {
writeblob("boosterBytes", this.model.model_info()._boosterBytes);
byte[] auxNodeWeightBytes = this.model.model_info().auxNodeWeightBytes();
if (auxNodeWeightBytes != null) {
writeblob("auxNodeWeights", auxNodeWeightBytes);
}
writekv("nums", model._output._nums);
writekv("cats", model._output._cats);
writekv("cat_offsets", model._output._catOffsets);
writekv("use_all_factor_levels", model._output._useAllFactorLevels);
writekv("sparse", model._output._sparse);
writekv("booster", model._parms._booster.toString());
writekv("ntrees", model._output._ntrees);
writeblob("feature_map", model.model_info().getFeatureMap().getBytes(Charset.forName("UTF-8")));
writekv("use_java_scoring_by_default", true);
if (model._output.isCalibrated()) {
final CalibrationHelper.CalibrationMethod calibMethod = model._output.getCalibrationMethod();
final Model<?, ?, ?> calibModel = model._output.calibrationModel();
writekv("calib_method", calibMethod.getId());
switch (calibMethod) {
case PlattScaling:
double[] beta = ((GLMModel) calibModel).beta();
assert beta.length == model._output.nclasses(); // n-1 coefficients + 1 intercept
writekv("calib_glm_beta", beta);
break;
case IsotonicRegression:
IsotonicRegressionModel isotonic = (IsotonicRegressionModel) calibModel;
write(isotonic.toIsotonicCalibrator());
break;
default:
throw new UnsupportedOperationException("MOJO is not (yet) support for calibration model " + calibMethod);
}
}
writekv("has_offset", model._output.hasOffset());
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/XGBoostOutput.java
|
package hex.tree.xgboost;
import hex.Model;
import hex.ModelBuilder;
import hex.ScoreKeeper;
import hex.tree.CalibrationHelper;
import water.util.TwoDimTable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class XGBoostOutput extends Model.Output implements Model.GetNTrees, CalibrationHelper.OutputWithCalibration {
public XGBoostOutput(XGBoost b) {
super(b);
_scored_train = new ScoreKeeper[]{new ScoreKeeper(Double.NaN)};
_scored_valid = new ScoreKeeper[]{new ScoreKeeper(Double.NaN)};
}
int _nums;
int _cats;
int[] _catOffsets;
boolean _useAllFactorLevels;
boolean _useValidForScoreKeeping;
public boolean _sparse;
public int _ntrees;
public ScoreKeeper[/*ntrees+1*/] _scored_train;
public ScoreKeeper[/*ntrees+1*/] _scored_valid;
public ScoreKeeper[] scoreKeepers() {
List<ScoreKeeper> skl = new ArrayList<>();
ScoreKeeper[] ska = trainedWithValidation() ? _scored_valid : _scored_train;
for( ScoreKeeper sk : ska )
if (!sk.isEmpty())
skl.add(sk);
return skl.toArray(new ScoreKeeper[0]);
}
private boolean trainedWithValidation() {
return _validation_metrics != null || _useValidForScoreKeeping;
}
public long[/*ntrees+1*/] _training_time_ms = {System.currentTimeMillis()};
public TwoDimTable _variable_importances; // gain
@Override
public TwoDimTable getVariableImportances() {
return _variable_importances;
}
public TwoDimTable _variable_importances_cover;
public TwoDimTable _variable_importances_frequency;
public XgbVarImp _varimp;
public TwoDimTable _native_parameters;
public Model<?, ?, ?> _calib_model;
@Override
public TwoDimTable createInputFramesInformationTable(ModelBuilder modelBuilder) {
XGBoostModel.XGBoostParameters params = (XGBoostModel.XGBoostParameters) modelBuilder._parms;
TwoDimTable table = super.createInputFramesInformationTable(modelBuilder);
table.set(2, 0, "calibration_frame");
table.set(2, 1, params.getCalibrationFrame() != null ? params.getCalibrationFrame().checksum() : -1);
table.set(2, 2, params.getCalibrationFrame() != null ? Arrays.toString(params.getCalibrationFrame().anyVec().espc()) : -1);
return table;
}
@Override
public int getInformationTableNumRows() {
return super.getInformationTableNumRows() + 1;// +1 row for calibration frame
}
@Override
public int getNTrees() {
return _ntrees;
}
@Override
public Model<?, ?, ?> calibrationModel() {
return _calib_model;
}
@Override
public void setCalibrationModel(Model<?, ?, ?> model) {
_calib_model = model;
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/XGBoostPojoWriter.java
|
package hex.tree.xgboost;
import biz.k11i.xgboost.Predictor;
import biz.k11i.xgboost.gbm.Dart;
import biz.k11i.xgboost.gbm.GBLinear;
import biz.k11i.xgboost.gbm.GBTree;
import biz.k11i.xgboost.tree.RegTree;
import biz.k11i.xgboost.tree.RegTreeNode;
import hex.LinkFunction;
import hex.LinkFunctionFactory;
import hex.genmodel.utils.LinkFunctionType;
import water.codegen.CodeGenerator;
import water.codegen.CodeGeneratorPipeline;
import water.exceptions.JCodeSB;
import water.util.SBPrintStream;
import static hex.genmodel.algos.xgboost.XGBoostMojoModel.ObjectiveType;
public abstract class XGBoostPojoWriter {
public static XGBoostPojoWriter make(
Predictor p,
String namePrefix,
XGBoostOutput output,
double defaultThreshold
) {
if (p.getBooster() instanceof GBTree) {
return new XGBoostPojoTreeWriter(p, namePrefix, output, defaultThreshold);
} else {
return new XGBoostPojoLinearWriter(p, namePrefix, output, defaultThreshold);
}
}
protected final Predictor _p;
protected final String _namePrefix;
protected final XGBoostOutput _output;
private final double _defaultThreshold;
protected XGBoostPojoWriter(Predictor p, String namePrefix, XGBoostOutput output, double defaultThreshold) {
_p = p;
_namePrefix = namePrefix;
_output = output;
_defaultThreshold = defaultThreshold;
}
protected String getFeatureAccessor(int idx) {
if (idx >= _output._catOffsets[_output._cats]) {
int colIdx = idx - _output._catOffsets[_output._cats] + _output._cats;
if (_output._sparse) {
return "(data[" + colIdx + "] == 0 ? Double.NaN : data[" + colIdx + "])";
} else {
return "data[" + colIdx + "]";
}
} else {
int colIdx = 0;
while (idx >= _output._catOffsets[colIdx + 1]) colIdx++;
int colValue = idx - _output._catOffsets[colIdx];
return "(data[" + colIdx + "] == " + colValue + " ? 1 : " + (_output._sparse ? "Float.NaN" : "0") + ")";
}
}
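// Illustrative example (assumption, not part of the original source): with a single categorical column
// of 3 levels (_catOffsets = {0, 3}, _cats = 1) and dense data (_sparse == false):
//   getFeatureAccessor(1) -> "(data[0] == 1 ? 1 : 0)"   // second level of the categorical column
//   getFeatureAccessor(3) -> "data[1]"                   // first numeric column
// With _sparse == true the numeric accessor becomes "(data[1] == 0 ? Double.NaN : data[1])".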
private void renderPredTransformViaLinkFunction(LinkFunctionType type, SBPrintStream sb) {
LinkFunction lf = LinkFunctionFactory.getLinkFunction(type);
sb.ip("preds[0] = (float) ").p(lf.linkInvStringFloat("preds[0]")).p(";").nl();
}
private void renderMultiClassPredTransform(SBPrintStream sb) {
sb.ip("double max = preds[0];").nl();
sb.ip("for (int i = 1; i < preds.length-1; i++) max = Math.max(preds[i], max); ").nl();
sb.ip("double sum = 0.0D;").nl();
sb.ip("for (int i = 0; i < preds.length-1; i++) {").nl();
sb.ip(" preds[i] = Math.exp(preds[i] - max);").nl();
sb.ip(" sum += preds[i];").nl();
sb.ip("}").nl();
sb.ip("for (int i = 0; i < preds.length-1; i++) {").nl();
sb.ip(" preds[i] /= (float) sum;").nl();
sb.ip("}").nl();
}
private void renderPredTransform(SBPrintStream sb) {
String objFunction = _p.getObjName();
if (ObjectiveType.REG_GAMMA.getId().equals(objFunction) ||
ObjectiveType.REG_TWEEDIE.getId().equals(objFunction) ||
ObjectiveType.COUNT_POISSON.getId().equals(objFunction)) {
renderPredTransformViaLinkFunction(LinkFunctionType.log, sb);
} else if (ObjectiveType.BINARY_LOGISTIC.getId().equals(objFunction)) {
renderPredTransformViaLinkFunction(LinkFunctionType.logit, sb);
} else if (ObjectiveType.REG_LINEAR.getId().equals(objFunction) ||
ObjectiveType.REG_SQUAREDERROR.getId().equals(objFunction) ||
ObjectiveType.RANK_PAIRWISE.getId().equals(objFunction)) {
renderPredTransformViaLinkFunction(LinkFunctionType.identity, sb);
} else if (ObjectiveType.MULTI_SOFTPROB.getId().equals(objFunction)) {
renderMultiClassPredTransform(sb);
} else {
throw new IllegalArgumentException("Unexpected objFunction " + objFunction);
}
}
private void renderPredPostProcess(SBPrintStream sb) {
if (_output.nclasses() > 2) {
sb.ip("for (int i = preds.length-2; i >= 0; i--)").nl();
sb.ip(" preds[1 + i] = preds[i];").nl();
sb.ip("preds[0] = GenModel.getPrediction(preds, PRIOR_CLASS_DISTRIB, data, ").pj(_defaultThreshold).p(");").nl();
} else if (_output.nclasses() == 2) {
sb.ip("preds[1] = 1f - preds[0];").nl();
sb.ip("preds[2] = preds[0];").nl();
sb.ip("preds[0] = GenModel.getPrediction(preds, PRIOR_CLASS_DISTRIB, data, ").pj(_defaultThreshold).p(");").nl();
}
}
public void renderJavaPredictBody(SBPrintStream sb, CodeGeneratorPipeline fileCtx) {
renderComputePredict(sb, fileCtx);
renderPredTransform(sb);
renderPredPostProcess(sb);
}
protected abstract void renderComputePredict(SBPrintStream sb, CodeGeneratorPipeline fileCtx);
static class XGBoostPojoTreeWriter extends XGBoostPojoWriter {
protected XGBoostPojoTreeWriter(Predictor p, String namePrefix, XGBoostOutput output, double defaultThreshold) {
super(p, namePrefix, output, defaultThreshold);
}
@Override
public void renderComputePredict(SBPrintStream sb, CodeGeneratorPipeline fileCtx) {
GBTree booster = (GBTree) _p.getBooster();
Dart dartBooster = null;
if (booster instanceof Dart) {
dartBooster = (Dart) booster;
}
RegTree[][] trees = booster.getGroupedTrees();
for (int gidx = 0; gidx < trees.length; gidx++) {
sb.ip("float preds_").p(gidx).p(" = ").pj(_p.getBaseScore()).p(";").nl();
for (int tidx = 0; tidx < trees[gidx].length; tidx++) {
String treeClassName = renderTreeClass(trees, gidx, tidx, dartBooster, fileCtx);
sb.ip("preds_").p(gidx).p(" += ").p(treeClassName).p(".score0(data);").nl();
}
sb.ip("preds[").p(gidx).p("] = preds_").p(gidx).p(";").nl();
}
}
private String renderTreeClass(
RegTree[][] trees,
final int gidx,
final int tidx,
final Dart dart,
CodeGeneratorPipeline fileCtx
) {
final RegTree tree = trees[gidx][tidx];
final String className = _namePrefix + "_Tree_g_" + gidx + "_t_" + tidx;
fileCtx.add(new CodeGenerator() {
@Override
public void generate(JCodeSB sb) {
sb.nl().p("class ").p(className).p(" {").nl();
sb.ii(1);
sb.ip("static float score0(double[] data) {").nl();
sb.ii(1);
sb.ip("return ");
if (dart != null) {
sb.pj(dart.weight(tidx)).p(" * ");
}
renderTree(sb, tree, 0);
sb.p(";").nl();
sb.di(1);
sb.ip("}").nl();
sb.di(1);
sb.ip("}").nl();
}
});
return className;
}
private void renderTree(JCodeSB sb, RegTree tree, int nidx) {
RegTreeNode node = tree.getNodes()[nidx];
if (node.isLeaf()) {
sb.ip("").pj(node.getLeafValue());
} else {
String accessor = getFeatureAccessor(node.getSplitIndex());
String operator;
int trueChild;
int falseChild;
if (node.default_left()) {
operator = " < ";
trueChild = node.getLeftChildIndex();
falseChild = node.getRightChildIndex();
} else {
operator = " >= ";
trueChild = node.getRightChildIndex();
falseChild = node.getLeftChildIndex();
}
sb.ip("((Double.isNaN(").p(accessor).p(") || ((float)").p(accessor).p(")").p(operator).pj(node.getSplitCondition()).p(") ?").nl();
sb.ii(1);
renderTree(sb, tree, trueChild);
sb.nl().ip(":").nl();
renderTree(sb, tree, falseChild);
sb.di(1);
sb.nl().ip(")");
}
}
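// Illustrative example (assumption, not part of the original source): for a split on feature data[1]
// with split condition 3.5 and default_left == true, the rendered expression looks roughly like
//   ((Double.isNaN(data[1]) || ((float)data[1]) < 3.5f) ?
//     <left leaf value>
//     :
//     <right leaf value>
//   )
// i.e. missing values follow the tree's default direction, matching native XGBoost scoring.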
}
static class XGBoostPojoLinearWriter extends XGBoostPojoWriter {
protected XGBoostPojoLinearWriter(Predictor p, String namePrefix, XGBoostOutput output, double defaultThreshold) {
super(p, namePrefix, output, defaultThreshold);
}
@Override
public void renderComputePredict(SBPrintStream sb, CodeGeneratorPipeline fileCtx) {
GBLinear booster = (GBLinear) _p.getBooster();
for (int gidx = 0; gidx < booster.getNumOutputGroup(); gidx++) {
sb.ip("preds[").p(gidx).p("] =").nl();
sb.ii(1);
for (int fid = 0; fid < booster.getNumFeature(); fid++) {
String accessor = getFeatureAccessor(fid);
sb.ip("(Double.isNaN(").p(accessor).p(") ? 0 : (").pj(booster.weight(fid, gidx)).p(" * ").p(accessor).p(")) + ").nl();
}
sb.ip("").pj(booster.bias(gidx)).p(" +").nl();
sb.ip("").pj(_p.getBaseScore()).p(";").nl();
sb.di(1);
}
}
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/XGBoostTypeMapExtension.java
|
package hex.tree.xgboost;
import hex.tree.xgboost.exec.XGBoostExecReq;
import hex.tree.xgboost.matrix.SparseMatrixDimensions;
import hex.tree.xgboost.remote.RemoteXGBoostHandler;
import hex.tree.xgboost.task.XGBoostUploadMatrixTask;
import hex.tree.xgboost.util.FeatureScore;
import water.TypeMapExtension;
public class XGBoostTypeMapExtension implements TypeMapExtension {
private static final String[] EXTERNAL_COMMUNICATION_CLASSES = {
XGBoostExecReq.class.getName(),
XGBoostExecReq.Init.class.getName(),
XGBoostExecReq.Update.class.getName(),
XGBoostExecReq.GetEvalMetric.class.getName(),
SparseMatrixDimensions.class.getName(),
EvalMetric.class.getName(),
RemoteXGBoostHandler.RemoteExecutors.class.getName(),
XGBoostUploadMatrixTask.DenseMatrixChunk.class.getName(),
XGBoostUploadMatrixTask.DenseMatrixDimensions.class.getName(),
XGBoostUploadMatrixTask.MatrixData.class.getName(),
XGBoostUploadMatrixTask.SparseMatrixChunk.class.getName(),
FeatureScore.class.getName()
};
@Override
public String[] getBoostrapClasses() {
return EXTERNAL_COMMUNICATION_CLASSES;
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/XGBoostUtils.java
|
package hex.tree.xgboost;
import hex.DataInfo;
import hex.tree.xgboost.matrix.DenseMatrixFactory;
import hex.tree.xgboost.matrix.MatrixLoader;
import hex.tree.xgboost.matrix.SparseMatrixFactory;
import ai.h2o.xgboost4j.java.DMatrix;
import ai.h2o.xgboost4j.java.XGBoostError;
import org.apache.log4j.Logger;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.Vec;
import water.util.VecUtils;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import static water.H2O.technote;
import static water.MemoryManager.malloc4f;
public class XGBoostUtils {
private static final Logger LOG = Logger.getLogger(XGBoostUtils.class);
public static void createFeatureMap(XGBoostModel model, Frame train) {
// Create a "feature map" and store in a temporary file (for Variable Importance, MOJO, ...)
DataInfo dataInfo = model.model_info().dataInfo();
assert dataInfo != null;
String featureMap = makeFeatureMap(train, dataInfo);
model.model_info().setFeatureMap(featureMap);
}
private static String makeFeatureMap(Frame f, DataInfo di) {
// set the names for the (expanded) columns
String[] coefnames = di.coefNames();
StringBuilder sb = new StringBuilder();
assert(coefnames.length == di.fullN());
int catCols = di._catOffsets[di._catOffsets.length-1];
for (int i = 0; i < di.fullN(); i++) {
sb.append(i).append(" ").append(coefnames[i].replaceAll("\\s*","")).append(" ");
if (i < catCols || f.vec(i-catCols).isBinary())
sb.append("i");
else if (f.vec(i-catCols).isInt())
sb.append("int");
else
sb.append("q");
sb.append("\n");
}
return sb.toString();
}
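// Illustrative example of the produced format (assumption, not part of the original source): for a
// 2-level categorical column "sex" and an integer column "age", the expanded coefficient names yield
// one line per feature, e.g.
//   0 sex.F i
//   1 sex.M i
//   2 age int
// where "i" marks indicator (one-hot/binary) columns, "int" integer columns and "q" quantitative ones.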
/**
* Converts an H2O Frame to a DMatrix provider (sparse or dense, depending on the {@code sparse} flag)
* @param di data info
* @param frame H2O Frame - adapted using the provided data info
* @param response name of the response column
* @param weight name of the weight column
* @param offset name of the offset column
* @param sparse whether to build a sparse (CSR) matrix
* @return a DMatrix provider
*/
public static MatrixLoader.DMatrixProvider convertFrameToDMatrix(DataInfo di,
Frame frame,
String response,
String weight,
String offset,
boolean sparse) {
assert di != null;
int[] chunks = VecUtils.getLocalChunkIds(frame.anyVec());
final Vec responseVec = frame.vec(response);
final Vec weightVec = frame.vec(weight);
final Vec offsetsVec = frame.vec(offset);
final int[] nRowsByChunk = new int[chunks.length];
final long nRowsL = sumChunksLength(chunks, responseVec, Optional.ofNullable(weightVec), nRowsByChunk);
if (nRowsL > Integer.MAX_VALUE) {
throw new IllegalArgumentException("XGBoost currently doesn't support datasets with more than " +
Integer.MAX_VALUE + " per node. " +
"To train a XGBoost model on this dataset add more nodes to your H2O cluster and use distributed training.");
}
final int nRows = (int) nRowsL;
final MatrixLoader.DMatrixProvider trainMat;
// In the future these arrays might also need to be rewritten into float[][],
// but only if we want to handle datasets with over 2^31-1 rows on a single machine. For now I'd leave it as it is.
float[] resp = malloc4f(nRows);
float[] weights = null;
float[] offsets = null;
if (weightVec != null) {
weights = malloc4f(nRows);
}
if (offsetsVec != null) {
offsets = malloc4f(nRows);
}
if (sparse) {
LOG.debug("Treating matrix as sparse.");
trainMat = SparseMatrixFactory.csr(frame, chunks, weightVec, offsetsVec, responseVec, di, resp, weights, offsets);
} else {
LOG.debug("Treating matrix as dense.");
trainMat = DenseMatrixFactory.dense(frame, chunks, nRows, nRowsByChunk, weightVec, offsetsVec, responseVec, di, resp, weights, offsets);
}
return trainMat;
}
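// Minimal usage sketch (assumption, not part of the original source; `model` and `adaptedFrame` are
// hypothetical, and the frame is expected to be already adapted to the model's DataInfo):
//
//   DataInfo di = model.model_info().dataInfo();
//   MatrixLoader.DMatrixProvider provider =
//       XGBoostUtils.convertFrameToDMatrix(di, adaptedFrame, "response", "weights", null, false);
//   DMatrix trainMat = provider.makeDMatrix(); // may throw XGBoostError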
/**
* Computes the total number of rows across the given chunks of a vector. Only chunks listed in chunkIds are counted.
* If a weights vector is provided, only rows with non-zero weights are counted.
*
* @param chunkIds Chunk ids to consider during the calculation. Chunk ids not listed are not included.
* @param vec Vector whose chunks are being counted
* @param weightsVector Vector with row weights, possibly an empty optional
* @param chunkLengths Array of integers into which the lengths of the individual chunks will be written. It is expected to be initialized to zeros.
* @return The sum of chunk lengths. Possibly zero, if there are no chunks or the chunks are empty.
*/
public static long sumChunksLength(int[] chunkIds, Vec vec, Optional<Vec> weightsVector, int[] chunkLengths) {
assert chunkLengths.length == chunkIds.length;
for (int i = 0; i < chunkIds.length; i++) {
final int chunk = chunkIds[i];
if (weightsVector.isPresent()) {
final Chunk weightVecChunk = weightsVector.get().chunkForChunkIdx(chunk);
assert weightVecChunk.len() == vec.chunkLen(chunk); // Chunk layout of both vectors must be the same
if (weightVecChunk.len() == 0) continue;
int nzIndex = 0;
do {
if (weightVecChunk.atd(nzIndex) != 0) chunkLengths[i]++;
nzIndex = weightVecChunk.nextNZ(nzIndex, true);
} while (nzIndex > 0 && nzIndex < weightVecChunk._len);
} else {
chunkLengths[i] = vec.chunkLen(chunk);
}
}
long totalChunkLength = 0;
for (int cl : chunkLengths) {
totalChunkLength += cl;
}
return totalChunkLength;
}
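// Worked example (assumption, not part of the original source): with chunkIds = {0, 2}, chunk 0 holding
// 4 rows and chunk 2 holding 3 rows, and no weights vector, chunkLengths ends up as {4, 3} and the
// method returns 7. If a weights vector is supplied and one row of chunk 0 has weight 0, the result
// becomes {3, 3} and the method returns 6.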
/**
* Converts a set of H2O chunks (representing a part of a frame) to a DMatrix (sparse or dense)
* @return DMatrix
* @throws XGBoostError
*/
public static DMatrix convertChunksToDMatrix(
DataInfo di, Chunk[] chunks, int response, boolean sparse, int offset
) throws XGBoostError {
int nRows = chunks[0]._len;
DMatrix trainMat;
float[] resp = malloc4f(nRows);
float[] off = null;
if (offset >= 0) {
off = malloc4f(nRows);
}
try {
if (sparse) {
LOG.debug("Treating matrix as sparse.");
trainMat = SparseMatrixFactory.csr(chunks, -1, response, offset, di, resp, null, off);
} else {
trainMat = DenseMatrixFactory.dense(chunks, di, response, resp, null, offset, off);
}
} catch (NegativeArraySizeException e) {
throw new IllegalArgumentException(technote(11,
"Data is too large to fit into the 32-bit Java float[] array that needs to be passed to the XGBoost C++ backend. Use H2O GBM instead."));
}
int len = (int) trainMat.rowNum();
if (off != null) {
off = Arrays.copyOf(off, len);
trainMat.setBaseMargin(off);
}
resp = Arrays.copyOf(resp, len);
trainMat.setLabel(resp);
return trainMat;
}
public static FeatureProperties assembleFeatureNames(final DataInfo di) {
String[] coefnames = di.coefNames();
assert (coefnames.length == di.fullN());
int numCatCols = di._catOffsets[di._catOffsets.length - 1];
String[] featureNames = new String[di.fullN()];
boolean[] oneHotEncoded = new boolean[di.fullN()];
int[] originalColumnIndices = di.coefOriginalColumnIndices();
for (int i = 0; i < di.fullN(); i++) {
featureNames[i] = coefnames[i];
if (i < numCatCols) {
oneHotEncoded[i] = true;
}
}
return new FeatureProperties(di._adaptedFrame._names, featureNames, oneHotEncoded, originalColumnIndices);
}
public static class FeatureProperties {
public String[] _originalNames;
public Map<String, Integer> _originalNamesMap;
public String[] _names;
public boolean[] _oneHotEncoded;
public int[] _originalColumnIndices;
public FeatureProperties(String[] originalNames, String[] names, boolean[] oneHotEncoded, int[] originalColumnIndices) {
_originalNames = originalNames;
_originalNamesMap = new HashMap<>();
for(int i = 0; i < originalNames.length; i++){
_originalNamesMap.put(originalNames[i], i);
}
_names = names;
_oneHotEncoded = oneHotEncoded;
_originalColumnIndices = originalColumnIndices;
}
public int getOriginalIndex(String originalName){
return _originalNamesMap.get(originalName);
}
public Integer[] mapOriginalNamesToIndices(String[] names){
Integer[] res = new Integer[names.length];
for(int i = 0; i<names.length; i++){
res[i] = getOriginalIndex(names[i]);
}
return res;
}
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/XgbVarImp.java
|
package hex.tree.xgboost;
import hex.VarImp;
public class XgbVarImp extends VarImp {
final public float[] _covers; // Cover of each variable
final public int[] _freqs; // Variable frequencies
public XgbVarImp(String[] names, float[] gains, float[] covers, int[] freqs) {
super(gains, names);
_covers = covers;
_freqs = freqs;
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/exec/LocalXGBoostExecutor.java
|
package hex.tree.xgboost.exec;
import hex.CustomMetric;
import hex.DataInfo;
import hex.genmodel.utils.IOUtils;
import hex.tree.xgboost.BoosterParms;
import hex.tree.xgboost.EvalMetric;
import hex.tree.xgboost.XGBoostModel;
import hex.tree.xgboost.matrix.FrameMatrixLoader;
import hex.tree.xgboost.matrix.MatrixLoader;
import hex.tree.xgboost.matrix.RemoteMatrixLoader;
import hex.tree.xgboost.rabit.RabitTrackerH2O;
import hex.tree.xgboost.task.XGBoostCleanupTask;
import hex.tree.xgboost.task.XGBoostSetupTask;
import hex.tree.xgboost.task.XGBoostUpdateTask;
import water.DKV;
import water.H2O;
import water.Key;
import water.Keyed;
import water.fvec.Frame;
import water.util.Log;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import static hex.tree.xgboost.remote.RemoteXGBoostUploadServlet.getCheckpointFile;
public class LocalXGBoostExecutor implements XGBoostExecutor {
interface CheckpointProvider { byte[] get(); }
public final Key modelKey;
private final BoosterParms boosterParams;
private final MatrixLoader loader;
private final CheckpointProvider checkpointProvider;
private final boolean[] nodes;
private final String saveMatrixDirectory;
private final RabitTrackerH2O rt;
private final Key<Frame> toCleanUp;
private XGBoostSetupTask setupTask;
private XGBoostUpdateTask updateTask;
private EvalMetric evalMetric;
/**
* Used when executing from a remote model
*/
public LocalXGBoostExecutor(Key key, XGBoostExecReq.Init init) {
modelKey = key;
rt = setupRabitTracker(init.num_nodes);
boosterParams = BoosterParms.fromMap(init.parms);
nodes = new boolean[H2O.CLOUD.size()];
for (int i = 0; i < init.num_nodes; i++) nodes[i] = init.nodes[i] != null;
loader = new RemoteMatrixLoader(modelKey);
toCleanUp = null;
saveMatrixDirectory = init.save_matrix_path;
checkpointProvider = () -> {
if (!init.has_checkpoint) {
return null;
} else {
File checkpointFile = getCheckpointFile(modelKey.toString());
ByteArrayOutputStream bos = new ByteArrayOutputStream();
try (FileInputStream fis = new FileInputStream(checkpointFile)) {
IOUtils.copyStream(fis, bos);
} catch (IOException e) {
throw new RuntimeException("Failed writing data to response.", e);
} finally {
checkpointFile.delete();
}
return bos.toByteArray();
}
};
}
/**
* Used when executing from a local model
*/
public LocalXGBoostExecutor(XGBoostModel model, Frame train, Frame valid) {
modelKey = model._key;
XGBoostSetupTask.FrameNodes trainFrameNodes = XGBoostSetupTask.findFrameNodes(train);
if (valid != null) {
XGBoostSetupTask.FrameNodes validFrameNodes = XGBoostSetupTask.findFrameNodes(valid);
if (!validFrameNodes.isSubsetOf(trainFrameNodes)) {
Log.warn("Need to re-distribute the Validation Frame because it has data on nodes that " +
"don't have any data of the training matrix. This might impact runtime performance.");
toCleanUp = Key.make();
valid = train.makeSimilarlyDistributed(valid, toCleanUp);
} else
toCleanUp = null;
} else
toCleanUp = null;
rt = setupRabitTracker(trainFrameNodes.getNumNodes());
DataInfo dataInfo = model.model_info().dataInfo();
boosterParams = XGBoostModel.createParams(model._parms, model._output.nclasses(), dataInfo.coefNames());
model._output._native_parameters = boosterParams.toTwoDimTable();
loader = new FrameMatrixLoader(model, train, valid);
nodes = trainFrameNodes._nodes;
saveMatrixDirectory = model._parms._save_matrix_directory;
checkpointProvider = () -> {
if (model._parms.hasCheckpoint()) {
return model.model_info()._boosterBytes;
} else {
return null;
}
};
}
@Override
public byte[] setup() {
setupTask = new XGBoostSetupTask(
modelKey, saveMatrixDirectory, boosterParams, checkpointProvider.get(), getRabitEnv(), nodes, loader
);
setupTask.run();
updateTask = new XGBoostUpdateTask(setupTask, 0).run();
return updateTask.getBoosterBytes();
}
private RabitTrackerH2O setupRabitTracker(int numNodes) {
// XGBoost seems to manipulate its frames in the 1-node distributed case in a way
// the GPU plugin can't handle. Therefore, don't use RabitTracker envs for a single node.
if (numNodes > 1) {
RabitTrackerH2O rt = new RabitTrackerH2O(numNodes);
rt.start(0);
return rt;
} else {
return null;
}
}
private void stopRabitTracker() {
if (rt != null) {
rt.waitFor(0);
rt.stop();
}
}
private Map<String, String> getRabitEnv() {
if (rt != null) {
return rt.getWorkerEnvs();
} else {
return new HashMap<>();
}
}
@Override
public void update(int treeId) {
evalMetric = null; // invalidate cached eval metric
updateTask = new XGBoostUpdateTask(setupTask, treeId);
updateTask.run();
}
@Override
public EvalMetric getEvalMetric() {
if (evalMetric != null) { // re-use the cached value; this is important for early stopping when final scoring is done without a prior boosting iteration
return evalMetric;
}
if (updateTask == null) {
throw new IllegalStateException(
"Custom metric can only be retrieved immediately after running update iteration.");
}
evalMetric = updateTask.getEvalMetric();
return evalMetric;
}
@Override
public byte[] updateBooster() {
if (updateTask != null) {
byte[] booster = updateTask.getBoosterBytes();
updateTask = null;
return booster;
}
return null;
}
@Override
public void close() {
if (toCleanUp != null) {
Keyed.remove(toCleanUp);
}
XGBoostCleanupTask.cleanUp(setupTask);
stopRabitTracker();
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/exec/RemoteXGBoostExecutor.java
|
package hex.tree.xgboost.exec;
import hex.DataInfo;
import hex.schemas.XGBoostExecRespV3;
import hex.tree.xgboost.BoosterParms;
import hex.tree.xgboost.EvalMetric;
import hex.tree.xgboost.XGBoostModel;
import hex.tree.xgboost.remote.RemoteXGBoostHandler;
import hex.tree.xgboost.task.XGBoostUploadMatrixTask;
import hex.tree.xgboost.task.XGBoostSetupTask;
import org.apache.log4j.Logger;
import water.H2O;
import water.Key;
import water.Keyed;
import water.TypeMap;
import water.fvec.Frame;
import water.util.Log;
import java.util.Arrays;
public class RemoteXGBoostExecutor implements XGBoostExecutor {
private static final Logger LOG = Logger.getLogger(RemoteXGBoostExecutor.class);
public final XGBoostHttpClient http;
public final Key modelKey;
public RemoteXGBoostExecutor(XGBoostModel model, Frame train, Frame valid,
String remoteUri, String userName, String password) {
final boolean https = H2O.ARGS.jks != null;
http = new XGBoostHttpClient(remoteUri, https, userName, password);
modelKey = model._key;
XGBoostExecReq.Init req = new XGBoostExecReq.Init();
XGBoostSetupTask.FrameNodes trainFrameNodes = XGBoostSetupTask.findFrameNodes(train);
req.num_nodes = trainFrameNodes.getNumNodes();
DataInfo dataInfo = model.model_info().dataInfo();
req.setParms(XGBoostModel.createParamsMap(model._parms, model._output.nclasses(), dataInfo.coefNames()));
model._output._native_parameters = BoosterParms.fromMap(req.parms).toTwoDimTable();
req.save_matrix_path = model._parms._save_matrix_directory;
req.nodes = collectNodes(trainFrameNodes);
LOG.info("Initializing remote executor.");
XGBoostExecRespV3 resp = http.postJson(modelKey, "init", req);
RemoteXGBoostHandler.RemoteExecutors executors = resp.readData();
if (! Arrays.equals(executors._typeMap, TypeMap.bootstrapClasses())) {
LOG.error("TypeMap differs: " +
"H2O=" + Arrays.toString(TypeMap.bootstrapClasses()) + ";" +
"XGB=" + Arrays.toString(executors._typeMap)
);
throw new IllegalStateException("H2O Cluster and XGBoost external cluster do not have identical TypeMap.");
}
assert modelKey.equals(resp.key.key());
uploadCheckpointBooster(model);
uploadMatrix(model, train, true, trainFrameNodes, executors._nodes, https, remoteUri, userName, password);
if (valid != null) {
XGBoostSetupTask.FrameNodes validFrameNodes = XGBoostSetupTask.findFrameNodes(valid);
Key<Frame> toCleanUp = null;
if (!validFrameNodes.isSubsetOf(trainFrameNodes)) {
Log.info("Validation Frame will be re-distributed to be collocated with remote nodes of the " +
"training matrix.");
toCleanUp = Key.make();
valid = train.makeSimilarlyDistributed(valid, toCleanUp);
}
uploadMatrix(model, valid, false, validFrameNodes, executors._nodes, https, remoteUri, userName, password);
if (toCleanUp != null) {
Keyed.remove(toCleanUp);
}
}
LOG.info("Remote executor init complete.");
}
private void uploadMatrix(
XGBoostModel model, Frame train, boolean isTrain,
XGBoostSetupTask.FrameNodes trainFrameNodes, String[] remoteNodes,
boolean https, String leaderUri, String userName, String password
) {
LOG.info("Starting matrix data upload.");
new XGBoostUploadMatrixTask(
model, train, isTrain, trainFrameNodes._nodes, remoteNodes,
https, parseContextPath(leaderUri), userName, password
).run();
}
private String parseContextPath(String leaderUri) {
int slashIndex = leaderUri.indexOf("/");
if (slashIndex > 0) {
return leaderUri.substring(slashIndex);
} else {
return "";
}
}
private void uploadCheckpointBooster(XGBoostModel model) {
if (!model._parms.hasCheckpoint()) {
return;
}
LOG.info("Uploading booster checkpoint.");
http.uploadCheckpointBytes(modelKey, model.model_info()._boosterBytes);
}
private String[] collectNodes(XGBoostSetupTask.FrameNodes nodes) {
String[] res = new String[H2O.CLOUD.size()];
for (int i = 0; i < nodes._nodes.length; i++) {
if (nodes._nodes[i]) {
res[i] = H2O.CLOUD.members()[i].getIpPortString();
}
}
return res;
}
@Override
public byte[] setup() {
XGBoostExecReq req = new XGBoostExecReq(); // no req params
return http.downloadBytes(modelKey, "setup", req);
}
@Override
public void update(int treeId) {
XGBoostExecReq.Update req = new XGBoostExecReq.Update();
req.treeId = treeId;
XGBoostExecRespV3 resp = http.postJson(modelKey, "update", req);
assert resp.key.key().equals(modelKey);
}
@Override
public byte[] updateBooster() {
XGBoostExecReq req = new XGBoostExecReq(); // no req params
return http.downloadBytes(modelKey, "getBooster", req);
}
@Override
public EvalMetric getEvalMetric() {
XGBoostExecReq.GetEvalMetric req = new XGBoostExecReq.GetEvalMetric();
XGBoostExecRespV3 resp = http.postJson(modelKey, "getEvalMetric", req);
assert resp.key.key().equals(modelKey);
return resp.readData();
}
@Override
public void close() {
XGBoostExecReq req = new XGBoostExecReq(); // no req params
XGBoostExecRespV3 resp = http.postJson(modelKey, "cleanup", req);
assert resp.key.key().equals(modelKey);
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/exec/XGBoostExecReq.java
|
package hex.tree.xgboost.exec;
import water.BootstrapFreezable;
import water.Iced;
import water.util.IcedHashMapGeneric;
import java.util.Arrays;
import java.util.Map;
public class XGBoostExecReq extends Iced<XGBoostExecReq> implements BootstrapFreezable<XGBoostExecReq> {
@Override
public String toString() {
return "XGBoostExecReq{}";
}
public static class Init extends XGBoostExecReq {
public int num_nodes;
public IcedHashMapGeneric.IcedHashMapStringObject parms;
public String save_matrix_path;
public String[] nodes;
public boolean has_checkpoint;
public void setParms(Map<String, Object> parms) {
this.parms = new IcedHashMapGeneric.IcedHashMapStringObject();
this.parms.putAll(parms);
}
@Override
public String toString() {
return "XGBoostExecReq.Init{" +
"num_nodes=" + num_nodes +
", parms=" + parms +
", save_matrix_path='" + save_matrix_path + '\'' +
", nodes=" + Arrays.toString(nodes) +
", has_checkpoint=" + has_checkpoint +
'}';
}
}
public static class Update extends XGBoostExecReq {
public int treeId;
@Override
public String toString() {
return "XGBoostExecReq.Update{" +
"treeId=" + treeId +
'}';
}
}
public static class GetEvalMetric extends XGBoostExecReq {
@Override
public String toString() {
return "XGBoostExecReq.GetEvalMetric{}";
}
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/exec/XGBoostExecutor.java
|
package hex.tree.xgboost.exec;
import hex.tree.xgboost.EvalMetric;
public interface XGBoostExecutor extends AutoCloseable {
byte[] setup();
void update(int treeId);
byte[] updateBooster();
/**
* Retrieves the value of the evaluation metric for both the training and validation datasets.
* @return instance of EvalMetric if "eval_metric" was defined, null otherwise
*/
EvalMetric getEvalMetric();
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/exec/XGBoostHttpClient.java
|
package hex.tree.xgboost.exec;
import hex.genmodel.utils.IOUtils;
import hex.schemas.XGBoostExecReqV3;
import hex.schemas.XGBoostExecRespV3;
import water.BootstrapFreezable;
import hex.tree.xgboost.remote.RemoteXGBoostUploadServlet;
import org.apache.http.HttpEntity;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.AuthenticationException;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.conn.ssl.NoopHostnameVerifier;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.conn.ssl.TrustSelfSignedStrategy;
import org.apache.http.entity.AbstractHttpEntity;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.InputStreamEntity;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.auth.BasicScheme;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.ssl.SSLContexts;
import org.apache.http.util.EntityUtils;
import org.apache.log4j.Logger;
import water.AutoBuffer;
import water.Key;
import javax.net.ssl.SSLContext;
import java.io.*;
import java.net.URISyntaxException;
import java.security.GeneralSecurityException;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.apache.http.HttpHeaders.CONTENT_TYPE;
import static water.util.HttpResponseStatus.OK;
public class XGBoostHttpClient {
private static final Logger LOG = Logger.getLogger(XGBoostHttpClient.class);
private final String baseUri;
private final HttpClientBuilder clientBuilder;
private final UsernamePasswordCredentials credentials;
interface ResponseTransformer<T> {
T transform(HttpEntity e) throws IOException;
}
private static final ResponseTransformer<byte[]> ByteArrayResponseTransformer = (e) -> {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
IOUtils.copyStream(e.getContent(), bos);
bos.close();
byte[] b = bos.toByteArray();
if (b.length == 0) return null;
else return b;
};
private static final ResponseTransformer<XGBoostExecRespV3> JsonResponseTransformer = (e) -> {
String responseBody = EntityUtils.toString(e);
XGBoostExecRespV3 resp = new XGBoostExecRespV3();
resp.fillFromBody(responseBody);
return resp;
};
public XGBoostHttpClient(String baseUri, boolean https, String userName, String password) {
String suffix = "3/XGBoostExecutor.";
if (!baseUri.endsWith("/")) suffix = "/" + suffix;
this.baseUri = (https ? "https" : "http") + "://" + baseUri + suffix;
if (userName != null) {
credentials = new UsernamePasswordCredentials(userName, password);
} else {
credentials = null;
}
this.clientBuilder = createClientBuilder(https);
}
private HttpClientBuilder createClientBuilder(boolean https) {
try {
HttpClientBuilder builder = HttpClientBuilder.create();
if (https) {
SSLContext sslContext = SSLContexts.custom()
.loadTrustMaterial(TrustSelfSignedStrategy.INSTANCE)
.build();
SSLConnectionSocketFactory sslFactory = new SSLConnectionSocketFactory(
sslContext,
NoopHostnameVerifier.INSTANCE
);
builder.setSSLSocketFactory(sslFactory);
}
if (credentials != null) {
CredentialsProvider provider = new BasicCredentialsProvider();
provider.setCredentials(AuthScope.ANY, credentials);
builder.setDefaultCredentialsProvider(provider);
}
return builder;
} catch (GeneralSecurityException e) {
throw new RuntimeException("Failed to initialize HTTP client.", e);
}
}
public XGBoostExecRespV3 postJson(Key key, String method, XGBoostExecReq reqContent) {
return post(key, method, reqContent, JsonResponseTransformer);
}
public byte[] downloadBytes(Key key, String method, XGBoostExecReq reqContent) {
return post(key, method, reqContent, ByteArrayResponseTransformer);
}
private <T> T post(Key key, String method, XGBoostExecReq reqContent, ResponseTransformer<T> transformer) {
LOG.info("Request " + method + " " + key + " " + reqContent);
XGBoostExecReqV3 req = new XGBoostExecReqV3(key, reqContent);
HttpPost httpReq = new HttpPost(baseUri + method);
httpReq.setEntity(new StringEntity(req.toJsonString(), UTF_8));
httpReq.setHeader(CONTENT_TYPE, ContentType.APPLICATION_JSON.getMimeType());
return executeRequestAndReturnResponse(httpReq, transformer);
}
private HttpPost makeUploadRequest(Key key, RemoteXGBoostUploadServlet.RequestType dataType) {
try {
URIBuilder uri = new URIBuilder(baseUri + "upload");
uri.setParameter("model_key", key.toString())
.setParameter("data_type", dataType.toString());
return new HttpPost(uri.build());
} catch (URISyntaxException e) {
throw new RuntimeException("Failed to build request URI.", e);
}
}
private HttpPost makeUploadMatrixRequest(Key key, RemoteXGBoostUploadServlet.RequestType requestType,
RemoteXGBoostUploadServlet.MatrixRequestType matrixRequestType) {
try {
URIBuilder uri = new URIBuilder(baseUri + "upload");
uri.setParameter("model_key", key.toString())
.setParameter("request_type", requestType.toString())
.setParameter("data_type", matrixRequestType.toString());
return new HttpPost(uri.build());
} catch (URISyntaxException e) {
throw new RuntimeException("Failed to build request URI.", e);
}
}
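// Illustrative example (assumption, not part of the original source): with a remote leader reachable at
// xgb-leader:54321 over plain HTTP, a training-matrix upload request built here targets a URI of roughly
// the form
//   http://xgb-leader:54321/3/XGBoostExecutor.upload?model_key=<model key>&request_type=matrixTrain&data_type=<matrix part type>
// while makeUploadRequest above only sets model_key and data_type (e.g. data_type=checkpoint).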
public void uploadCheckpointBytes(Key<?> key, byte[] data) {
LOG.info("Request upload checkpoint of model " + key + ", checkpoint size = " + data.length + " bytes");
HttpPost httpReq = makeUploadRequest(key, RemoteXGBoostUploadServlet.RequestType.checkpoint);
httpReq.setEntity(new InputStreamEntity(new ByteArrayInputStream(data)));
addAuthentication(httpReq);
XGBoostExecRespV3 resp = executeRequestAndReturnResponse(httpReq, JsonResponseTransformer);
assert resp.key.key().equals(key);
}
private static class ObjectEntity extends AbstractHttpEntity {
private final BootstrapFreezable<?> object;
private ObjectEntity(BootstrapFreezable<?> object) {
this.object = object;
}
@Override
public void writeTo(OutputStream out) throws IOException {
LOG.debug("Sending " + object);
try (AutoBuffer ab = new AutoBuffer(out, false)) {
ab.put(object);
}
out.flush();
}
@Override
public boolean isStreaming() {
return true;
}
@Override
public boolean isRepeatable() {
return false;
}
@Override
public long getContentLength() {
return -1;
}
@Override
public InputStream getContent() throws UnsupportedOperationException {
throw new UnsupportedOperationException();
}
}
public void uploadMatrixData(Key<?> key,
RemoteXGBoostUploadServlet.MatrixRequestType matrixRequestType, boolean isTrain,
BootstrapFreezable<?> data) {
LOG.info("Request upload " + key + " " + matrixRequestType + " " + data.getClass().getSimpleName());
RemoteXGBoostUploadServlet.RequestType requestType = isTrain ?
RemoteXGBoostUploadServlet.RequestType.matrixTrain : RemoteXGBoostUploadServlet.RequestType.matrixValid;
HttpPost httpReq = makeUploadMatrixRequest(key, requestType, matrixRequestType);
httpReq.setEntity(new ObjectEntity(data));
addAuthentication(httpReq);
XGBoostExecRespV3 resp = executeRequestAndReturnResponse(httpReq, JsonResponseTransformer);
assert resp.key.key().equals(key);
}
/*
For binary POST requests it is necessary to add authentication this way.
*/
private void addAuthentication(HttpPost httpReq) {
if (credentials != null) {
try {
httpReq.addHeader(new BasicScheme().authenticate(credentials, httpReq, null));
} catch (AuthenticationException e) {
throw new IllegalStateException("Unable to authenticate request.", e);
}
}
}
private <T> T executeRequestAndReturnResponse(HttpPost req, ResponseTransformer<T> transformer) {
try (CloseableHttpClient client = clientBuilder.build();
CloseableHttpResponse response = client.execute(req)) {
if (response.getStatusLine().getStatusCode() != OK.getCode()) {
throw new IllegalStateException("Unexpected response (status: " + response.getStatusLine() + ").");
}
LOG.debug("Response received " + response.getEntity().getContentLength() + " bytes.");
return transformer.transform(response.getEntity());
} catch (IOException e) {
throw new RuntimeException("HTTP Request failed", e);
}
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/matrix/DenseMatrixFactory.java
|
package hex.tree.xgboost.matrix;
import hex.DataInfo;
import ai.h2o.xgboost4j.java.DMatrix;
import ai.h2o.xgboost4j.java.XGBoostError;
import ai.h2o.xgboost4j.java.util.BigDenseMatrix;
import org.apache.log4j.Logger;
import water.H2O;
import water.LocalMR;
import water.MrFun;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.Vec;
import java.util.Objects;
import static hex.tree.xgboost.matrix.MatrixFactoryUtils.setResponseAndWeightAndOffset;
public class DenseMatrixFactory {
private static final Logger LOG = Logger.getLogger(DenseMatrixFactory.class);
public static DMatrix dense(
Chunk[] chunks, DataInfo di, int respIdx, float[] resp, float[] weights, int offsetIdx, float[] offsets
) throws XGBoostError {
LOG.debug("Treating matrix as dense.");
BigDenseMatrix data = null;
try {
data = allocateDenseMatrix(chunks[0].len(), di);
long actualRows = denseChunk(data, chunks, respIdx, di, resp, weights, offsetIdx, offsets);
assert actualRows == data.nrow;
return new DMatrix(data, Float.NaN);
} finally {
if (data != null) {
data.dispose();
}
}
}
public static class DenseDMatrixProvider extends MatrixLoader.DMatrixProvider {
private BigDenseMatrix data;
protected DenseDMatrixProvider(
long actualRows,
float[] response,
float[] weights,
float[] offsets,
BigDenseMatrix data
) {
super(actualRows, response, weights, offsets);
this.data = data;
}
@Override
public void print(int nrow) {
for (int i = 0; i < (nrow > 0 ? nrow : data.nrow); i++) {
System.out.print(i + ":");
for (int j = 0; j < data.ncol; j++) {
System.out.print(data.get(i, j) + ", ");
}
System.out.print(response[i]);
System.out.println();
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (!super.equals(o)) return false;
DenseDMatrixProvider that = (DenseDMatrixProvider) o;
if (that.data.ncol != this.data.ncol || that.data.nrow != this.data.nrow) return false;
for (int i = 0; i < this.data.nrow; i++) {
for (int j = 0; j < this.data.ncol; j++) {
if (this.data.get(i, j) != that.data.get(i, j)) {
return false;
}
}
}
return true;
}
@Override
public int hashCode() {
return super.hashCode() + Objects.hash(data);
}
@Override
public DMatrix makeDMatrix() throws XGBoostError {
return new DMatrix(data, Float.NaN);
}
@Override
protected void dispose() {
if (data != null) {
data.dispose();
data = null;
}
}
}
public static DenseDMatrixProvider dense(
Frame f, int[] chunks, int nRows, int[] nRowsByChunk, Vec weightVec, Vec offsetVec, Vec responseVec,
DataInfo di, float[] resp, float[] weights, float[] offsets
) {
BigDenseMatrix data = null;
try {
data = allocateDenseMatrix(nRows, di);
final int actualRows;
if (nRows != 0) {
actualRows = denseChunk(data, chunks, nRowsByChunk, f, weightVec, offsetVec, responseVec, di, resp, weights, offsets);
} else {
actualRows = 0;
}
assert data.nrow == actualRows;
return new DenseDMatrixProvider(actualRows, resp, weights, offsets, data);
} catch (Exception e) {
if (data != null) {
data.dispose();
}
throw new RuntimeException("Error while create off-heap matrix.", e);
}
}
private static int denseChunk(
BigDenseMatrix data,
int[] chunks, int[] nRowsByChunk, Frame f, Vec weightsVec, Vec offsetVec, Vec respVec, DataInfo di,
float[] resp, float[] weights, float[] offsets
) {
int[] rowOffsets = new int[nRowsByChunk.length + 1];
for (int i = 0; i < chunks.length; i++) {
rowOffsets[i + 1] = nRowsByChunk[i] + rowOffsets[i];
}
WriteDenseChunkFun writeFun = new WriteDenseChunkFun(
f, chunks, rowOffsets, weightsVec, offsetVec, respVec, di, data, resp, weights, offsets
);
H2O.submitTask(new LocalMR(writeFun, chunks.length)).join();
return writeFun.getTotalRows();
}
private static class WriteDenseChunkFun extends MrFun<WriteDenseChunkFun> {
private final Frame _f;
private final int[] _chunks;
private final int[] _rowOffsets;
private final Vec _weightsVec;
private final Vec _offsetsVec;
private final Vec _respVec;
private final DataInfo _di;
private final BigDenseMatrix _data;
private final float[] _resp;
private final float[] _weights;
private final float[] _offsets;
// OUT
private final int[] _nRowsByChunk;
private WriteDenseChunkFun(Frame f, int[] chunks, int[] rowOffsets, Vec weightsVec, Vec offsetsVec, Vec respVec, DataInfo di,
BigDenseMatrix data, float[] resp, float[] weights, float[] offsets) {
_f = f;
_chunks = chunks;
_rowOffsets = rowOffsets;
_weightsVec = weightsVec;
_offsetsVec = offsetsVec;
_respVec = respVec;
_di = di;
_data = data;
_resp = resp;
_weights = weights;
_offsets = offsets;
_nRowsByChunk = new int[chunks.length];
}
@Override
protected void map(int id) {
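// Each task writes the rows of one H2O chunk into the shared off-heap matrix, starting at the
// chunk's precomputed row offset; rows with zero weight are skipped and only the rows actually
// written are counted in _nRowsByChunk.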
final int chunkIdx = _chunks[id];
Chunk[] chks = new Chunk[_f.numCols()];
for (int c = 0; c < chks.length; c++) {
chks[c] = _f.vec(c).chunkForChunkIdx(chunkIdx);
}
Chunk weightsChk = _weightsVec != null ? _weightsVec.chunkForChunkIdx(chunkIdx) : null;
Chunk offsetsChk = _offsetsVec != null ? _offsetsVec.chunkForChunkIdx(chunkIdx) : null;
Chunk respChk = _respVec.chunkForChunkIdx(chunkIdx);
long idx = (long) _rowOffsets[id] * _data.ncol; // widen to long before multiplying to avoid int overflow on large matrices
int actualRows = 0;
for (int i = 0; i < chks[0]._len; i++) {
if (weightsChk != null && weightsChk.atd(i) == 0) continue;
idx = writeDenseRow(_di, chks, i, _data, idx);
_resp[_rowOffsets[id] + actualRows] = (float) respChk.atd(i);
if (weightsChk != null) {
_weights[_rowOffsets[id] + actualRows] = (float) weightsChk.atd(i);
}
if (offsetsChk != null) {
_offsets[_rowOffsets[id] + actualRows] = (float) offsetsChk.atd(i);
}
actualRows++;
}
assert idx == (long) _rowOffsets[id + 1] * _data.ncol;
_nRowsByChunk[id] = actualRows;
}
private int getTotalRows() {
int totalRows = 0;
for (int r : _nRowsByChunk) {
totalRows += r;
}
return totalRows;
}
}
private static long denseChunk(
BigDenseMatrix data, Chunk[] chunks, int respIdx, DataInfo di, float[] resp, float[] weights,
int offsetIdx, float[] offsets
) {
long idx = 0;
long actualRows = 0;
int rwRow = 0;
for (int i = 0; i < chunks[0]._len; i++) {
idx = writeDenseRow(di, chunks, i, data, idx);
actualRows++;
rwRow = setResponseAndWeightAndOffset(chunks, respIdx, -1, offsetIdx, resp, weights, offsets, rwRow, i);
}
assert (long) data.nrow * data.ncol == idx;
return actualRows;
}
private static long writeDenseRow(
DataInfo di, Chunk[] chunks, int rowInChunk,
BigDenseMatrix data, long idx
) {
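// One row is laid out as the one-hot encoded categoricals followed by the numeric columns.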
for (int j = 0; j < di._cats; j++) {
int len = di._catOffsets[j+1] - di._catOffsets[j];
double val = chunks[j].isNA(rowInChunk) ? Double.NaN : chunks[j].at8(rowInChunk);
int pos = di.getCategoricalId(j, val) - di._catOffsets[j];
for (int cat = 0; cat < len; cat++)
data.set(idx + cat, 0f); // native memory => explicit zero-ing is necessary
data.set(idx + pos, 1f);
idx += len;
}
for (int j = 0; j < di._nums; j++) {
float val = chunks[di._cats + j].isNA(rowInChunk) ? Float.NaN : (float) chunks[di._cats + j].atd(rowInChunk);
data.set(idx++, val);
}
return idx;
}
/**
* Allocates an exactly-sized {@link BigDenseMatrix} serving as the backing matrix for XGBoost's {@link DMatrix}.
* The matrix created by this method does not contain any actual data and needs to be filled.
*
* @param rowCount Number of rows to allocate data for
* @param dataInfo An instance of {@link DataInfo}
* @return An exactly-sized {@link BigDenseMatrix} for XGBoost's {@link DMatrix}, to be filled with data.
*/
private static BigDenseMatrix allocateDenseMatrix(final int rowCount, final DataInfo dataInfo) {
return new BigDenseMatrix(rowCount, dataInfo.fullN());
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/matrix/FrameMatrixLoader.java
|
package hex.tree.xgboost.matrix;
import hex.tree.xgboost.XGBoostModel;
import hex.tree.xgboost.XGBoostModelInfo;
import hex.tree.xgboost.XGBoostUtils;
import water.fvec.Frame;
public class FrameMatrixLoader extends MatrixLoader {
private final XGBoostModelInfo _modelInfo;
private final XGBoostModel.XGBoostParameters _parms;
private final boolean _sparse;
private final Frame _trainFrame;
private final Frame _validFrame;
public FrameMatrixLoader(XGBoostModel model, Frame train, Frame validFrame) {
_modelInfo = model.model_info();
_parms = model._parms;
_sparse = model._output._sparse;
_trainFrame = train;
_validFrame = validFrame;
}
@Override
public DMatrixProvider makeLocalTrainMatrix() {
return XGBoostUtils.convertFrameToDMatrix(
_modelInfo.dataInfo(),
_trainFrame,
_parms._response_column,
_parms._weights_column,
_parms._offset_column,
_sparse
);
}
@Override
public boolean hasValidationFrame() {
return _validFrame != null;
}
@Override
public DMatrixProvider makeLocalValidMatrix() {
return XGBoostUtils.convertFrameToDMatrix(
_modelInfo.dataInfo(),
_validFrame,
_parms._response_column,
_parms._weights_column,
_parms._offset_column,
_sparse
);
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/matrix/MatrixFactoryUtils.java
|
package hex.tree.xgboost.matrix;
import water.fvec.Chunk;
public class MatrixFactoryUtils {
public static int setResponseAndWeightAndOffset(
Chunk[] chunks, int respIdx, int weightIdx, int offsetIdx, float[] resp, float[] weights, float[] offsets,
int j, int i
) {
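// j is the output (write) row index: rows with zero weight are skipped (j unchanged),
// and j advances only after the response value for a kept row has been written.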
if (weightIdx != -1) {
if (chunks[weightIdx].atd(i) == 0) {
return j;
}
weights[j] = (float) chunks[weightIdx].atd(i);
}
if (offsetIdx >= 0) {
offsets[j] = (float) chunks[offsetIdx].atd(i);
}
if (respIdx != -1) {
resp[j++] = (float) chunks[respIdx].atd(i);
}
return j;
}
public static int setResponseWeightAndOffset(
Chunk weightChunk, Chunk offsetChunk, Chunk respChunk, float[] resp, float[] weights, float [] offsets,
int j, int i
) {
if (weightChunk != null) {
if(weightChunk.atd(i) == 0) {
return j;
}
weights[j] = (float) weightChunk.atd(i);
}
if (offsetChunk != null) {
offsets[j] = (float) offsetChunk.atd(i);
}
resp[j++] = (float) respChunk.atd(i);
return j;
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/matrix/MatrixLoader.java
|
package hex.tree.xgboost.matrix;
import ai.h2o.xgboost4j.java.DMatrix;
import ai.h2o.xgboost4j.java.XGBoostError;
import water.Iced;
import java.util.Arrays;
import java.util.Objects;
public abstract class MatrixLoader extends Iced<MatrixLoader> {
public static abstract class DMatrixProvider {
protected long actualRows;
protected float[] response;
protected float[] weights;
protected float[] offsets;
protected DMatrixProvider(long actualRows, float[] response, float[] weights, float[] offsets) {
this.actualRows = actualRows;
this.response = response;
this.weights = weights;
this.offsets = offsets;
}
protected abstract DMatrix makeDMatrix() throws XGBoostError;
@SuppressWarnings("unused") // used for debugging
public abstract void print(int nrow);
protected void dispose() {}
public final DMatrix get() throws XGBoostError {
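// Build the native DMatrix, release the staging data, then attach labels, weights and base margins.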
DMatrix mat = makeDMatrix();
dispose();
assert mat.rowNum() == actualRows;
mat.setLabel(response);
if (weights != null) {
mat.setWeight(weights);
}
if (offsets != null) {
mat.setBaseMargin(offsets);
}
return mat;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof DMatrixProvider)) return false;
DMatrixProvider that = (DMatrixProvider) o;
return actualRows == that.actualRows &&
Arrays.equals(response, that.response) &&
Arrays.equals(weights, that.weights) &&
Arrays.equals(offsets, that.offsets);
}
@Override
public int hashCode() {
int result = Objects.hash(actualRows);
result = 31 * result + Arrays.hashCode(response);
result = 31 * result + Arrays.hashCode(weights);
result = 31 * result + Arrays.hashCode(offsets);
return result;
}
}
public abstract DMatrixProvider makeLocalTrainMatrix();
public abstract DMatrixProvider makeLocalValidMatrix();
public abstract boolean hasValidationFrame();
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/matrix/RemoteMatrixLoader.java
|
package hex.tree.xgboost.matrix;
import hex.tree.xgboost.task.XGBoostUploadMatrixTask;
import ai.h2o.xgboost4j.java.util.BigDenseMatrix;
import water.Key;
import java.util.HashMap;
import java.util.Map;
public class RemoteMatrixLoader extends MatrixLoader {
static abstract class RemoteMatrix {
XGBoostUploadMatrixTask.MatrixData data;
abstract MatrixLoader.DMatrixProvider make();
}
static class RemoteSparseMatrix extends RemoteMatrix {
final SparseMatrixDimensions dims;
final SparseMatrix matrix;
RemoteSparseMatrix(SparseMatrixDimensions dims) {
this.dims = dims;
this.matrix = SparseMatrixFactory.allocateCSRMatrix(dims);
}
@Override
MatrixLoader.DMatrixProvider make() {
return SparseMatrixFactory.toDMatrix(matrix, dims, data.actualRows, data.shape, data.resp, data.weights, data.offsets);
}
}
static class RemoteDenseMatrix extends RemoteMatrix {
final XGBoostUploadMatrixTask.DenseMatrixDimensions dims;
final BigDenseMatrix matrix;
RemoteDenseMatrix(XGBoostUploadMatrixTask.DenseMatrixDimensions dims) {
this.dims = dims;
this.matrix = new BigDenseMatrix(dims.rows, dims.cols);
}
@Override
DMatrixProvider make() {
return new DenseMatrixFactory.DenseDMatrixProvider(data.actualRows, data.resp, data.weights, data.offsets, matrix);
}
}
private static final Map<String, RemoteMatrix> REGISTRY = new HashMap<>();
public static void initSparse(String key, SparseMatrixDimensions dims) {
RemoteSparseMatrix m = new RemoteSparseMatrix(dims);
REGISTRY.put(key, m);
}
public static void sparseChunk(String key, XGBoostUploadMatrixTask.SparseMatrixChunk chunk) {
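// Copy one uploaded chunk of CSR data into the preallocated matrix, starting at the chunk's
// precomputed row-header and non-zero element offsets.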
RemoteSparseMatrix m = (RemoteSparseMatrix) REGISTRY.get(key);
long nonZeroCount = m.dims._precedingNonZeroElementsCounts[chunk.id];
int rwRow = m.dims._precedingRowCounts[chunk.id];
SparseMatrixFactory.NestedArrayPointer rowHeaderPointer = new SparseMatrixFactory.NestedArrayPointer(rwRow);
SparseMatrixFactory.NestedArrayPointer dataPointer = new SparseMatrixFactory.NestedArrayPointer(nonZeroCount);
for (int i = 0; i < chunk.rowHeader.length; i++) {
rowHeaderPointer.setAndIncrement(m.matrix._rowHeaders, chunk.rowHeader[i]);
}
for (int i = 0; i < chunk.data.length; i++) {
dataPointer.set(m.matrix._sparseData, chunk.data[i]);
dataPointer.set(m.matrix._colIndices, chunk.colIndices[i]);
dataPointer.increment();
}
}
public static void initDense(String key, XGBoostUploadMatrixTask.DenseMatrixDimensions dims) {
RemoteDenseMatrix m = new RemoteDenseMatrix(dims);
REGISTRY.put(key, m);
}
public static void denseChunk(String key, XGBoostUploadMatrixTask.DenseMatrixChunk chunk) {
RemoteDenseMatrix m = (RemoteDenseMatrix) REGISTRY.get(key);
for (long i = 0; i < chunk.data.length; i++) {
m.matrix.set(i + ((long) m.dims.rowOffsets[chunk.id] * m.dims.cols), chunk.data[(int) i]); // widen to long before multiplying to avoid int overflow
}
}
public static void matrixData(String key, XGBoostUploadMatrixTask.MatrixData data) {
REGISTRY.get(key).data = data;
}
public static void cleanup(String key) {
REGISTRY.remove(key);
}
private final Key<?> modelKey;
public RemoteMatrixLoader(Key<?> modelKey) {
this.modelKey = modelKey;
}
@Override
public DMatrixProvider makeLocalTrainMatrix() {
return REGISTRY.remove(trainMatrixKey(modelKey)).make();
}
public static String trainMatrixKey(Key<?> modelKey) {
return modelKey.toString() + "_train";
}
@Override
public boolean hasValidationFrame() {
return REGISTRY.containsKey(validMatrixKey(modelKey));
}
@Override
public DMatrixProvider makeLocalValidMatrix() {
return REGISTRY.remove(validMatrixKey(modelKey)).make();
}
public static String validMatrixKey(Key<?> modelKey) {
return modelKey.toString() + "_valid";
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/matrix/SparseMatrix.java
|
package hex.tree.xgboost.matrix;
/**
* Sparse Matrix representation for XGBoost
*
* CSR:
* long[] rowHeaders = new long[] {0, 2, 4, 7}; // offsets
* float[] data = new float[] {1f,2f, 4f,3f, 3f,1f,2f}; // non-zeros across each row
* int[] colIndex = new int[] {0, 2, 0, 3, 0, 1, 2}; // col index for each non-zero
*/
public final class SparseMatrix {
/**
* Maximum length of a single row of the backing arrays of a sparse matrix; large matrices are split
* into rows of at most MAX_DIM elements, so a backing array can hold up to MAX_DIM x MAX_DIM values.
*/
public static int MAX_DIM = Integer.MAX_VALUE - 10;
public final float[][] _sparseData;
public final long[][] _rowHeaders;
public final int[][] _colIndices;
/**
* Constructs a {@link SparseMatrix} instance
*
* @param sparseData Non-zero data of a sparse matrix
* @param rowIndices CSR row headers: for each row, the offset into sparseData at which that row begins
* @param colIndices Column indices of elements in sparseData
*/
public SparseMatrix(final float[][] sparseData, final long[][] rowIndices, final int[][] colIndices) {
_sparseData = sparseData;
_rowHeaders = rowIndices;
_colIndices = colIndices;
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/matrix/SparseMatrixDimensions.java
|
package hex.tree.xgboost.matrix;
import water.BootstrapFreezable;
import water.Iced;
import water.util.ArrayUtils;
/**
* Dimensions of a Sparse Matrix
*/
public final class SparseMatrixDimensions extends Iced<SparseMatrixDimensions> implements BootstrapFreezable<SparseMatrixDimensions> {
public final int[] _precedingRowCounts;
public final long[] _precedingNonZeroElementsCounts;
public final long _nonZeroElementsCount;
public final int _rowHeadersCount;
/**
* Constructs an instance of {@link SparseMatrixDimensions}
*
* @param nonZeroElementsCounts Per-chunk counts of non-zero elements (elements of the sparse matrix);
*                              equivalently, the per-chunk counts of column indices.
* @param rowIndicesCounts Per-chunk counts of row header entries, i.e. the number of rows each chunk contributes
*/
public SparseMatrixDimensions(int[] nonZeroElementsCounts, int[] rowIndicesCounts) {
int precedingRowCount = 0;
long precedingNonZeroCount = 0;
_precedingRowCounts = new int[rowIndicesCounts.length];
_precedingNonZeroElementsCounts = new long[nonZeroElementsCounts.length];
assert rowIndicesCounts.length == nonZeroElementsCounts.length;
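// Exclusive prefix sums: element i holds the row / non-zero totals of all preceding chunks.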
for (int i = 0; i < nonZeroElementsCounts.length; i++) {
_precedingRowCounts[i] = precedingRowCount;
_precedingNonZeroElementsCounts[i] = precedingNonZeroCount;
precedingRowCount += rowIndicesCounts[i];
precedingNonZeroCount += nonZeroElementsCounts[i];
}
_nonZeroElementsCount = ArrayUtils.suml(nonZeroElementsCounts);
_rowHeadersCount = ArrayUtils.sum(rowIndicesCounts) + 1;
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/matrix/SparseMatrixFactory.java
|
package hex.tree.xgboost.matrix;
import hex.DataInfo;
import ai.h2o.xgboost4j.java.DMatrix;
import ai.h2o.xgboost4j.java.XGBoostError;
import water.H2O;
import water.LocalMR;
import water.MrFun;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.Vec;
import water.util.ArrayUtils;
import java.util.Arrays;
import java.util.Objects;
import static hex.tree.xgboost.matrix.MatrixFactoryUtils.setResponseAndWeightAndOffset;
import static hex.tree.xgboost.matrix.MatrixFactoryUtils.setResponseWeightAndOffset;
import static water.MemoryManager.*;
/*
- builds a truly sparse (CSR) representation: each categorical contributes a single one-hot 1.0f entry, numerics only when non-zero
- non-zeros are collected chunk by chunk (in parallel), then stitched together into the final CSR data structures
*/
public class SparseMatrixFactory {
public static MatrixLoader.DMatrixProvider csr(
Frame frame, int[] chunksIds, Vec weightsVec, Vec offsetsVec, Vec responseVec, // for setupLocal
DataInfo di, float[] resp, float[] weights, float[] offsets
) {
SparseMatrixDimensions sparseMatrixDimensions = calculateCSRMatrixDimensions(frame, chunksIds, weightsVec, di);
SparseMatrix sparseMatrix = allocateCSRMatrix(sparseMatrixDimensions);
int actualRows = initializeFromChunkIds(
frame, chunksIds, weightsVec, offsetsVec,
di, sparseMatrix, sparseMatrixDimensions,
responseVec, resp, weights, offsets);
return toDMatrix(sparseMatrix, sparseMatrixDimensions, actualRows, di.fullN(), resp, weights, offsets);
}
public static DMatrix csr(
Chunk[] chunks, int weight, int respIdx, int offsetIdx, // for MR task
DataInfo di, float[] resp, float[] weights, float[] offsets
) throws XGBoostError {
SparseMatrixDimensions sparseMatrixDimensions = calculateCSRMatrixDimensions(chunks, di, weight);
SparseMatrix sparseMatrix = allocateCSRMatrix(sparseMatrixDimensions);
int actualRows = initializeFromChunks(
chunks, weight,
di, sparseMatrix._rowHeaders, sparseMatrix._sparseData, sparseMatrix._colIndices,
respIdx, resp, weights, offsetIdx, offsets);
return toDMatrix(sparseMatrix, sparseMatrixDimensions, actualRows, di.fullN(), resp, weights, offsets).get();
}
public static class SparseDMatrixProvider extends MatrixLoader.DMatrixProvider {
private long[][] rowHeaders;
private int[][] colIndices;
private float[][] sparseData;
private DMatrix.SparseType csr;
private int shape;
private long nonZeroElementsCount;
public SparseDMatrixProvider(
long[][] rowHeaders,
int[][] colIndices,
float[][] sparseData,
DMatrix.SparseType csr,
int shape,
long nonZeroElementsCount,
int actualRows,
float[] response,
float[] weights,
float[] offsets
) {
super(actualRows, response, weights, offsets);
this.rowHeaders = rowHeaders;
this.colIndices = colIndices;
this.sparseData = sparseData;
this.csr = csr;
this.shape = shape;
this.nonZeroElementsCount = nonZeroElementsCount;
}
@Override
public DMatrix makeDMatrix() throws XGBoostError {
return new DMatrix(rowHeaders, colIndices, sparseData, csr, shape, (int) actualRows + 1, nonZeroElementsCount);
}
@Override
public void print(int nrow) {
NestedArrayPointer r = new NestedArrayPointer();
NestedArrayPointer d = new NestedArrayPointer();
long elemIndex = 0;
r.increment();
for (int i = 0; i < (nrow > 0 ? nrow : actualRows); i++) {
System.out.print(i + ":\t");
long rowEnd = r.get(rowHeaders);
r.increment();
for (; elemIndex < rowEnd; elemIndex++) {
System.out.print(d.get(colIndices) + ":" + d.get(sparseData) + "\t");
d.increment();
}
System.out.print(response[i]);
System.out.println();
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (!super.equals(o)) return false;
SparseDMatrixProvider that = (SparseDMatrixProvider) o;
return shape == that.shape &&
nonZeroElementsCount == that.nonZeroElementsCount &&
Arrays.deepEquals(rowHeaders, that.rowHeaders) &&
Arrays.deepEquals(colIndices, that.colIndices) &&
Arrays.deepEquals(sparseData, that.sparseData) &&
csr == that.csr;
}
@Override
public int hashCode() {
int result = Objects.hash(super.hashCode(), csr, shape, nonZeroElementsCount);
result = 31 * result + Arrays.hashCode(rowHeaders);
result = 31 * result + Arrays.hashCode(colIndices);
result = 31 * result + Arrays.hashCode(sparseData);
return result;
}
}
public static SparseDMatrixProvider toDMatrix(
SparseMatrix sm, SparseMatrixDimensions smd, int actualRows, int shape, float[] resp, float[] weights, float[] offsets) {
return new SparseDMatrixProvider(
sm._rowHeaders, sm._colIndices, sm._sparseData, DMatrix.SparseType.CSR, shape, smd._nonZeroElementsCount,
actualRows, resp, weights, offsets
);
}
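/**
* Addresses a logical 1D position inside 2D backing arrays whose rows are capped at
* {@link SparseMatrix#MAX_DIM} elements (a single Java array cannot hold more than ~Integer.MAX_VALUE values).
*/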
static class NestedArrayPointer {
int _row, _col;
public NestedArrayPointer() {
}
public NestedArrayPointer(long pos) {
this._row = (int) (pos / SparseMatrix.MAX_DIM);
this._col = (int) (pos % SparseMatrix.MAX_DIM);
}
void increment() {
_col++;
if(_col == SparseMatrix.MAX_DIM){
_col = 0;
_row++;
}
}
void set(long[][] dest, long val) {
dest[_row][_col] = val;
}
void set(float[][] dest, float val) {
dest[_row][_col] = val;
}
void set(int[][] dest, int val) {
dest[_row][_col] = val;
}
void setAndIncrement(long[][] dest, long val) {
set(dest, val);
increment();
}
public long get(long[][] dest) {
return dest[_row][_col];
}
public int get(int[][] dest) {
return dest[_row][_col];
}
public float get(float[][] dest) {
return dest[_row][_col];
}
}
public static int initializeFromChunkIds(
Frame frame, int[] chunks, Vec weightsVec, Vec offsetsVec, DataInfo di,
SparseMatrix matrix, SparseMatrixDimensions dimensions,
Vec respVec, float[] resp, float[] weights, float[] offsets
) {
InitializeCSRMatrixFromChunkIdsMrFun fun = new InitializeCSRMatrixFromChunkIdsMrFun(
frame, chunks, weightsVec, offsetsVec, di, matrix, dimensions, respVec, resp, weights, offsets
);
H2O.submitTask(new LocalMR(fun, chunks.length)).join();
return ArrayUtils.sum(fun._actualRows);
}
private static class InitializeCSRMatrixFromChunkIdsMrFun extends MrFun<InitializeCSRMatrixFromChunkIdsMrFun> {
Frame _frame;
int[] _chunks;
Vec _weightVec;
Vec _offsetsVec;
DataInfo _di;
SparseMatrix _matrix;
SparseMatrixDimensions _dims;
Vec _respVec;
float[] _resp;
float[] _weights;
float[] _offsets;
// OUT
int[] _actualRows;
InitializeCSRMatrixFromChunkIdsMrFun(
Frame frame, int[] chunks, Vec weightVec, Vec offsetVec, DataInfo di,
SparseMatrix matrix, SparseMatrixDimensions dimensions,
Vec respVec, float[] resp, float[] weights, float[] offsets
) {
_actualRows = new int[chunks.length];
_frame = frame;
_chunks = chunks;
_weightVec = weightVec;
_offsetsVec = offsetVec;
_di = di;
_matrix = matrix;
_dims = dimensions;
_respVec = respVec;
_resp = resp;
_weights = weights;
_offsets = offsets;
}
@Override
protected void map(int chunkIdx) {
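// Each task fills the CSR structures for one chunk, starting at the chunk's precomputed row and
// non-zero offsets: every categorical becomes a single 1.0f entry at its one-hot column, numerics
// are stored only when non-zero, and rows with zero weight are skipped.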
int chunk = _chunks[chunkIdx];
long nonZeroCount = _dims._precedingNonZeroElementsCounts[chunkIdx];
int rwRow = _dims._precedingRowCounts[chunkIdx];
NestedArrayPointer rowHeaderPointer = new NestedArrayPointer(rwRow);
NestedArrayPointer dataPointer = new NestedArrayPointer(nonZeroCount);
Chunk weightChunk = _weightVec != null ? _weightVec.chunkForChunkIdx(chunk) : null;
Chunk offsetChunk = _offsetsVec != null ? _offsetsVec.chunkForChunkIdx(chunk) : null;
Chunk respChunk = _respVec.chunkForChunkIdx(chunk);
Chunk[] featChunks = new Chunk[_frame.vecs().length];
for (int i = 0; i < featChunks.length; i++) {
featChunks[i] = _frame.vecs()[i].chunkForChunkIdx(chunk);
}
for(int i = 0; i < respChunk._len; i++) {
if (weightChunk != null && weightChunk.atd(i) == 0) continue;
rowHeaderPointer.setAndIncrement(_matrix._rowHeaders, nonZeroCount);
_actualRows[chunkIdx]++;
for (int j = 0; j < _di._cats; j++) {
dataPointer.set(_matrix._sparseData, 1);
if (featChunks[j].isNA(i)) {
dataPointer.set(_matrix._colIndices, _di.getCategoricalId(j, Float.NaN));
} else {
dataPointer.set(_matrix._colIndices, _di.getCategoricalId(j, featChunks[j].at8(i)));
}
dataPointer.increment();
nonZeroCount++;
}
for (int j = 0; j < _di._nums; j++) {
float val = (float) featChunks[_di._cats + j].atd(i);
if (val != 0) {
dataPointer.set(_matrix._sparseData, val);
dataPointer.set(_matrix._colIndices, _di._catOffsets[_di._catOffsets.length - 1] + j);
dataPointer.increment();
nonZeroCount++;
}
}
rwRow = setResponseWeightAndOffset(weightChunk, offsetChunk, respChunk, _resp, _weights, _offsets, rwRow, i);
}
rowHeaderPointer.set(_matrix._rowHeaders, nonZeroCount);
}
}
private static int initializeFromChunks(
Chunk[] chunks, int weight, DataInfo di, long[][] rowHeaders, float[][] data, int[][] colIndex,
int respIdx, float[] resp, float[] weights, int offsetIdx, float[] offsets
) {
int actualRows = 0;
int nonZeroCount = 0;
int rwRow = 0;
NestedArrayPointer rowHeaderPointer = new NestedArrayPointer();
NestedArrayPointer dataPointer = new NestedArrayPointer();
for (int i = 0; i < chunks[0].len(); i++) {
if (weight != -1 && chunks[weight].atd(i) == 0) continue;
actualRows++;
rowHeaderPointer.setAndIncrement(rowHeaders, nonZeroCount);
for (int j = 0; j < di._cats; j++) {
dataPointer.set(data, 1); //one-hot encoding
if (chunks[j].isNA(i)) {
dataPointer.set(colIndex, di.getCategoricalId(j, Float.NaN));
} else {
dataPointer.set(colIndex, di.getCategoricalId(j, chunks[j].at8(i)));
}
dataPointer.increment();
nonZeroCount++;
}
for (int j = 0; j < di._nums; j++) {
float val = (float) chunks[di._cats + j].atd(i);
if (val != 0) {
dataPointer.set(data, val);
dataPointer.set(colIndex, di._catOffsets[di._catOffsets.length - 1] + j);
dataPointer.increment();
nonZeroCount++;
}
}
rwRow = setResponseAndWeightAndOffset(chunks, respIdx, weight, offsetIdx, resp, weights, offsets, rwRow, i);
}
rowHeaderPointer.set(rowHeaders, nonZeroCount);
return actualRows;
}
/**
* Creates a {@link SparseMatrix} object with pre-instantiated backing arrays for the compressed sparse row (CSR) format.
* All backing arrays are allocated using MemoryManager.
*
* @param sparseMatrixDimensions Dimensions of a sparse matrix
* @return An instance of {@link SparseMatrix} with pre-allocated backing arrays.
*/
public static SparseMatrix allocateCSRMatrix(SparseMatrixDimensions sparseMatrixDimensions) {
// Number of rows in non-zero elements matrix
final int dataRowsNumber = (int) (sparseMatrixDimensions._nonZeroElementsCount / SparseMatrix.MAX_DIM);
final int dataLastRowSize = (int)(sparseMatrixDimensions._nonZeroElementsCount % SparseMatrix.MAX_DIM);
//Number of rows in matrix with row indices
final int rowIndicesRowsNumber = (int)(sparseMatrixDimensions._rowHeadersCount / SparseMatrix.MAX_DIM);
final int rowIndicesLastRowSize = (int)(sparseMatrixDimensions._rowHeadersCount % SparseMatrix.MAX_DIM);
// Number of rows in matrix with column indices of sparse matrix non-zero elements
// There is one column index per non-zero element, so no recalculation is needed.
final int colIndicesRowsNumber = dataRowsNumber;
final int colIndicesLastRowSize = dataLastRowSize;
// Sparse matrix elements (non-zero elements)
float[][] sparseData = new float[dataLastRowSize == 0 ? dataRowsNumber : dataRowsNumber + 1][];
int iterationLimit = dataLastRowSize == 0 ? sparseData.length : sparseData.length - 1;
for (int sparseDataRow = 0; sparseDataRow < iterationLimit; sparseDataRow++) {
sparseData[sparseDataRow] = malloc4f(SparseMatrix.MAX_DIM);
}
if (dataLastRowSize > 0) {
sparseData[sparseData.length - 1] = malloc4f(dataLastRowSize);
}
// Row indices
long[][] rowIndices = new long[rowIndicesLastRowSize == 0 ? rowIndicesRowsNumber : rowIndicesRowsNumber + 1][];
iterationLimit = rowIndicesLastRowSize == 0 ? rowIndices.length : rowIndices.length - 1;
for (int rowIndicesRow = 0; rowIndicesRow < iterationLimit; rowIndicesRow++) {
rowIndices[rowIndicesRow] = malloc8(SparseMatrix.MAX_DIM);
}
if (rowIndicesLastRowSize > 0) {
rowIndices[rowIndices.length - 1] = malloc8(rowIndicesLastRowSize);
}
// Column indices
int[][] colIndices = new int[colIndicesLastRowSize == 0 ? colIndicesRowsNumber : colIndicesRowsNumber + 1][];
iterationLimit = colIndicesLastRowSize == 0 ? colIndices.length : colIndices.length - 1;
for (int colIndicesRow = 0; colIndicesRow < iterationLimit; colIndicesRow++) {
colIndices[colIndicesRow] = malloc4(SparseMatrix.MAX_DIM);
}
if (colIndicesLastRowSize > 0) {
colIndices[colIndices.length - 1] = malloc4(colIndicesLastRowSize);
}
// Wrap backing arrays into a SparseMatrix object and return them
return new SparseMatrix(sparseData, rowIndices, colIndices);
}
protected static SparseMatrixDimensions calculateCSRMatrixDimensions(Chunk[] chunks, DataInfo di, int weightColIndex){
int[] nonZeroElementsCounts = new int[1];
int[] rowIndicesCounts = new int[1];
for (int i = 0; i < chunks[0].len(); i++) {
// Rows with zero weights are going to be ignored
if (weightColIndex != -1 && chunks[weightColIndex].atd(i) == 0) continue;
rowIndicesCounts[0]++;
nonZeroElementsCounts[0] += di._cats;
for (int j = 0; j < di._nums; j++) {
double val = chunks[di._cats + j].atd(i);
if (val != 0) {
nonZeroElementsCounts[0]++;
}
}
}
return new SparseMatrixDimensions(nonZeroElementsCounts, rowIndicesCounts);
}
public static SparseMatrixDimensions calculateCSRMatrixDimensions(Frame f, int[] chunkIds, Vec w, DataInfo di) {
CalculateCSRMatrixDimensionsMrFun fun = new CalculateCSRMatrixDimensionsMrFun(f, di, w, chunkIds);
H2O.submitTask(new LocalMR(fun, chunkIds.length)).join();
return new SparseMatrixDimensions(fun._nonZeroElementsCounts, fun._rowIndicesCounts);
}
private static class CalculateCSRMatrixDimensionsMrFun extends MrFun<CalculateCSRMatrixDimensionsMrFun> {
private Frame _f;
private DataInfo _di;
private Vec _w;
private int[] _chunkIds;
// OUT
private int[] _rowIndicesCounts;
private int[] _nonZeroElementsCounts;
CalculateCSRMatrixDimensionsMrFun(Frame f, DataInfo di, Vec w, int[] chunkIds) {
_f = f;
_di = di;
_w = w;
_chunkIds = chunkIds;
_rowIndicesCounts = new int[chunkIds.length];
_nonZeroElementsCounts = new int[chunkIds.length];
}
@Override
protected void map(int i) {
final int cidx = _chunkIds[i];
int rowIndicesCount = 0;
int nonZeroElementsCount = 0;
if (_di._nums == 0) {
if (_w == null) {
// no weights and only categoricals => sizing is trivial
rowIndicesCount = _f.anyVec().chunkForChunkIdx(cidx)._len;
nonZeroElementsCount = rowIndicesCount * _di._cats;
} else {
Chunk ws = _w.chunkForChunkIdx(cidx);
int nzWeights = 0;
for (int r = 0; r < ws._len; r++)
if (ws.atd(r) != 0) {
nzWeights++;
}
rowIndicesCount += nzWeights;
nonZeroElementsCount += nzWeights * _di._cats;
}
} else {
Chunk[] cs = new Chunk[_di._nums];
for (int c = 0; c < cs.length; c++) {
cs[c] = _f.vec(_di._cats + c).chunkForChunkIdx(cidx);
}
Chunk ws = _w != null ? _w.chunkForChunkIdx(cidx) : null;
for (int r = 0; r < cs[0]._len; r++) {
if (ws != null && ws.atd(r) == 0) continue;
rowIndicesCount++;
nonZeroElementsCount += _di._cats;
for (int j = 0; j < _di._nums; j++) {
if (cs[j].atd(r) != 0) {
nonZeroElementsCount++;
}
}
}
}
_rowIndicesCounts[i] = rowIndicesCount;
_nonZeroElementsCounts[i] = nonZeroElementsCount;
}
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/predict/AssignLeafNodeTask.java
|
package hex.tree.xgboost.predict;
import biz.k11i.xgboost.Predictor;
import biz.k11i.xgboost.util.FVec;
import hex.DataInfo;
import hex.Model;
import hex.tree.xgboost.XGBoostOutput;
import water.DKV;
import water.Key;
import water.MRTask;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.NewChunk;
import water.fvec.Vec;
import water.util.VecUtils;
public abstract class AssignLeafNodeTask extends MRTask<AssignLeafNodeTask> {
protected final Predictor _p;
protected final String[] _names;
private final DataInfo _di;
private final boolean _sparse;
private byte _resultType;
protected AssignLeafNodeTask(DataInfo di, XGBoostOutput output, byte[] boosterBytes, byte resultType) {
this._p = PredictorFactory.makePredictor(boosterBytes, null, false);
this._di = di;
this._sparse = output._sparse;
this._names = makeNames(output._ntrees, output.nclasses());
this._resultType = resultType;
}
protected abstract void assignNodes(final FVec input, final NewChunk[] outs);
private String[] makeNames(int ntrees, int nclass) {
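// Output column names follow the pattern T<tree>.C<class>; binomial and regression models use a single class column per tree.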
nclass = nclass > 2 ? nclass : 1;
String[] names = new String[ntrees * nclass];
for (int t = 0; t < ntrees; t++) {
for (int c = 0; c < nclass; c++) {
names[t*nclass + c] = "T" + (t+1) + ".C" + (c+1);
}
}
return names;
}
@Override
public void map(Chunk chks[], NewChunk[] idx) {
MutableOneHotEncoderFVec inputVec = new MutableOneHotEncoderFVec(_di, _sparse);
double[] input = new double[chks.length];
for (int row = 0; row < chks[0]._len; row++) {
for (int i = 0; i < chks.length; i++)
input[i] = chks[i].atd(row);
inputVec.setInput(input);
assignNodes(inputVec, idx);
}
}
public Frame execute(Frame adaptFrm, Key<Frame> destKey) {
return doAll(_names.length, _resultType, adaptFrm).outputFrame(destKey, _names, null);
}
public static AssignLeafNodeTask make(
DataInfo di, XGBoostOutput output, byte[] boosterBytes, Model.LeafNodeAssignment.LeafNodeAssignmentType type) {
switch (type) {
case Path:
return new AssignTreePathTask(di, output, boosterBytes);
case Node_ID:
return new AssignLeafNodeIdTask(di, output, boosterBytes);
default:
throw new UnsupportedOperationException("Unknown leaf node assignment type: " + type);
}
}
static class AssignTreePathTask extends AssignLeafNodeTask {
public AssignTreePathTask(DataInfo di, XGBoostOutput output, byte[] boosterBytes) {
super(di, output, boosterBytes, Vec.T_STR);
}
@Override
protected void assignNodes(FVec input, NewChunk[] outs) {
String[] leafPaths = _p.predictLeafPath(input);
for (int i = 0; i < leafPaths.length; i++) {
outs[i].addStr(leafPaths[i]);
}
}
@Override
public Frame execute(Frame adaptFrm, Key<Frame> destKey) {
Frame res = super.execute(adaptFrm, destKey);
// convert to categorical
Vec vv;
Vec[] nvecs = new Vec[res.vecs().length];
for(int c = 0; c < res.vecs().length; c++) {
vv = res.vec(c);
try {
nvecs[c] = vv.toCategoricalVec();
} catch (Exception e) {
VecUtils.deleteVecs(nvecs, c);
throw e;
}
}
res.delete();
res = new Frame(destKey, _names, nvecs);
DKV.put(res);
return res;
}
}
static class AssignLeafNodeIdTask extends AssignLeafNodeTask {
public AssignLeafNodeIdTask(DataInfo di, XGBoostOutput output, byte[] boosterBytes) {
super(di, output, boosterBytes, Vec.T_NUM);
}
@Override
protected void assignNodes(FVec input, NewChunk[] outs) {
int[] leafIdx = _p.getBooster().predictLeaf(input, 0);
for (int i = 0; i < leafIdx.length; i++) {
outs[i].addNum(leafIdx[i]);
}
}
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/predict/AuxNodeWeights.java
|
package hex.tree.xgboost.predict;
import hex.genmodel.algos.xgboost.AuxNodeWeightsHelper;
import water.Key;
import water.Keyed;
/**
* Represents auxiliary tree-node weights: arbitrary user-chosen weights stored in addition
* to the node weights XGBoost computed during model training
*/
public class AuxNodeWeights extends Keyed<AuxNodeWeights> {
public final byte[] _nodeWeightBytes;
public AuxNodeWeights(Key<AuxNodeWeights> key, double[][] nodeWeights) {
super(key);
_nodeWeightBytes = AuxNodeWeightsHelper.toBytes(nodeWeights);
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/predict/MutableOneHotEncoderFVec.java
|
package hex.tree.xgboost.predict;
import biz.k11i.xgboost.util.FVec;
import hex.DataInfo;
import hex.genmodel.GenModel;
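/**
* Adapts a raw H2O input row to XGBoost Predictor's {@link FVec} interface: categoricals are exposed
* as one-hot encoded columns (_catMap maps each expanded column back to its categorical), numerics
* follow the categorical block, and in sparse mode zero values are treated as missing (NaN).
*/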
public class MutableOneHotEncoderFVec implements FVec {
private final DataInfo _di;
private final boolean _treatsZeroAsNA;
private final int[] _catMap;
private final int[] _catValues;
private final float[] _numValues;
private final float _notHot;
public MutableOneHotEncoderFVec(DataInfo di, boolean treatsZeroAsNA) {
_di = di;
_catValues = new int[_di._cats];
_treatsZeroAsNA = treatsZeroAsNA;
_notHot = _treatsZeroAsNA ? Float.NaN : 0;
if (_di._catOffsets == null) {
_catMap = new int[0];
} else {
_catMap = new int[_di._catOffsets[_di._cats]];
for (int c = 0; c < _di._cats; c++) {
for (int j = _di._catOffsets[c]; j < _di._catOffsets[c+1]; j++)
_catMap[j] = c;
}
}
_numValues = new float[_di._nums];
}
public void setInput(double[] input) {
GenModel.setCats(input, _catValues, _di._cats, _di._catOffsets, _di._useAllFactorLevels);
for (int i = 0; i < _numValues.length; i++) {
float val = (float) input[_di._cats + i];
_numValues[i] = _treatsZeroAsNA && (val == 0) ? Float.NaN : val;
}
}
@Override
public final float fvalue(int index) {
if (index >= _catMap.length)
return _numValues[index - _catMap.length];
final boolean isHot = _catValues[_catMap[index]] == index;
return isHot ? 1 : _notHot;
}
public void decodeAggregate(float[] encoded, float[] output) {
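// Collapse contributions computed on the one-hot expanded columns back to the original column space:
// each categorical receives the sum of its level contributions, numeric contributions are copied as-is.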
for (int c = 0; c < _di._cats; c++) {
float sum = 0;
for (int i = _di._catOffsets[c]; i < _di._catOffsets[c + 1]; i++) {
sum += encoded[i];
}
output[c] = sum;
}
int numStart = _di._catOffsets[_di._cats];
if (_di._nums >= 0)
System.arraycopy(encoded, numStart, output, _di._cats, _di._nums);
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/predict/PredictTreeSHAPSortingTask.java
|
package hex.tree.xgboost.predict;
import hex.DataInfo;
import hex.Model;
import hex.genmodel.algos.tree.ContributionComposer;
import hex.genmodel.algos.tree.TreeSHAPPredictor;
import hex.tree.xgboost.XGBoostModelInfo;
import hex.tree.xgboost.XGBoostOutput;
import water.MemoryManager;
import water.fvec.Chunk;
import water.fvec.NewChunk;
public class PredictTreeSHAPSortingTask extends PredictTreeSHAPTask {
private final boolean _outputAggregated;
private final int _topN;
private final int _bottomN;
private final boolean _compareAbs;
public PredictTreeSHAPSortingTask(DataInfo di, XGBoostModelInfo modelInfo, XGBoostOutput output,
Model.Contributions.ContributionsOptions options) {
super(di,modelInfo,output, options);
_outputAggregated = Model.Contributions.ContributionsOutputFormat.Compact.equals(options._outputFormat);
_topN = options._topN;
_bottomN = options._bottomN;
_compareAbs = options._compareAbs;
}
protected void fillInput(Chunk[] chks, int row, double[] input, float[] contribs, int[] contribNameIds) {
super.fillInput(chks, row, input, contribs);
for (int i = 0; i < contribNameIds.length; i++) {
contribNameIds[i] = i;
}
}
@Override
public void map(Chunk[] chks, NewChunk[] nc) {
MutableOneHotEncoderFVec rowFVec = new MutableOneHotEncoderFVec(_di, _output._sparse);
double[] input = MemoryManager.malloc8d(chks.length);
float[] contribs = MemoryManager.malloc4f(_di.fullN() + 1);
float[] output = _outputAggregated ? MemoryManager.malloc4f(chks.length) : contribs;
int[] contribNameIds = MemoryManager.malloc4(output.length);
TreeSHAPPredictor.Workspace workspace = _mojo.makeContributionsWorkspace();
for (int row = 0; row < chks[0]._len; row++) {
fillInput(chks, row, input, contribs, contribNameIds);
rowFVec.setInput(input);
// calculate Shapley values
_mojo.calculateContributions(rowFVec, contribs, workspace);
handleOutputFormat(rowFVec, contribs, output);
ContributionComposer contributionComposer = new ContributionComposer();
int[] contribNameIdsSorted = contributionComposer.composeContributions(
contribNameIds, output, _topN, _bottomN, _compareAbs);
addContribToNewChunk(contribs, contribNameIdsSorted, nc);
}
}
protected void addContribToNewChunk(float[] contribs, int[] contribNamesSorted, NewChunk[] nc) {
for (int i = 0, inputPointer = 0; i < nc.length-1; i+=2, inputPointer++) {
nc[i].addNum(contribNamesSorted[inputPointer]);
nc[i+1].addNum(contribs[contribNamesSorted[inputPointer]]);
}
nc[nc.length-1].addNum(contribs[contribs.length-1]); // bias
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/predict/PredictTreeSHAPTask.java
|
package hex.tree.xgboost.predict;
import hex.DataInfo;
import hex.genmodel.algos.tree.TreeSHAPPredictor;
import hex.genmodel.algos.xgboost.XGBoostJavaMojoModel;
import hex.tree.xgboost.XGBoostModelInfo;
import hex.tree.xgboost.XGBoostOutput;
import water.MRTask;
import water.MemoryManager;
import water.fvec.Chunk;
import water.fvec.NewChunk;
import java.util.Arrays;
import static hex.Model.Contributions.ContributionsOptions;
import static hex.Model.Contributions.ContributionsOutputFormat;
public class PredictTreeSHAPTask extends MRTask<PredictTreeSHAPTask> {
protected final DataInfo _di;
protected final XGBoostModelInfo _modelInfo;
protected final XGBoostOutput _output;
protected final boolean _outputAggregated;
protected transient XGBoostJavaMojoModel _mojo;
public PredictTreeSHAPTask(DataInfo di, XGBoostModelInfo modelInfo, XGBoostOutput output,
ContributionsOptions options) {
_di = di;
_modelInfo = modelInfo;
_output = output;
_outputAggregated = ContributionsOutputFormat.Compact.equals(options._outputFormat);
}
@Override
protected void setupLocal() {
_mojo = new XGBoostJavaMojoModel(
_modelInfo._boosterBytes, _modelInfo.auxNodeWeightBytes(),
_output._names, _output._domains, _output.responseName(),
true
);
}
protected void fillInput(Chunk chks[], int row, double[] input, float[] contribs) {
for (int i = 0; i < chks.length; i++) {
input[i] = chks[i].atd(row);
}
Arrays.fill(contribs, 0);
}
@Override
public void map(Chunk[] chks, NewChunk[] nc) {
MutableOneHotEncoderFVec rowFVec = new MutableOneHotEncoderFVec(_di, _output._sparse);
double[] input = MemoryManager.malloc8d(chks.length);
float[] contribs = MemoryManager.malloc4f(_di.fullN() + 1);
float[] output = _outputAggregated ? MemoryManager.malloc4f(nc.length) : contribs;
TreeSHAPPredictor.Workspace workspace = _mojo.makeContributionsWorkspace();
for (int row = 0; row < chks[0]._len; row++) {
fillInput(chks, row, input, contribs);
rowFVec.setInput(input);
// calculate Shapley values
_mojo.calculateContributions(rowFVec, contribs, workspace);
handleOutputFormat(rowFVec, contribs, output);
addContribToNewChunk(output, nc);
}
}
protected void handleOutputFormat(final MutableOneHotEncoderFVec rowFVec, final float[] contribs, final float[] output) {
if (_outputAggregated) {
rowFVec.decodeAggregate(contribs, output);
output[output.length - 1] = contribs[contribs.length - 1]; // bias term
}
}
protected void addContribToNewChunk(final float[] contribs, final NewChunk[] nc) {
for (int i = 0; i < nc.length; i++) {
nc[i].addNum(contribs[i]);
}
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/predict/PredictTreeSHAPWithBackgroundTask.java
|
package hex.tree.xgboost.predict;
import hex.*;
import hex.genmodel.algos.xgboost.XGBoostJavaMojoModel;
import hex.genmodel.utils.DistributionFamily;
import hex.tree.xgboost.XGBoostModelInfo;
import hex.tree.xgboost.XGBoostOutput;
import water.MemoryManager;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.NewChunk;
import java.util.Arrays;
public class PredictTreeSHAPWithBackgroundTask extends ContributionsWithBackgroundFrameTask<PredictTreeSHAPWithBackgroundTask> {
protected final DataInfo _di;
protected final XGBoostModelInfo _modelInfo;
protected final XGBoostOutput _output;
protected final boolean _outputAggregated;
protected final boolean _outputSpace;
protected final Distribution _distribution;
protected transient XGBoostJavaMojoModel _mojo;
public PredictTreeSHAPWithBackgroundTask(DataInfo di, XGBoostModelInfo modelInfo, XGBoostOutput output,
Model.Contributions.ContributionsOptions options, Frame frame, Frame backgroundFrame, boolean perReference, boolean outputSpace) {
super(frame._key, backgroundFrame._key, perReference);
_di = di;
_modelInfo = modelInfo;
_output = output;
_outputAggregated = Model.Contributions.ContributionsOutputFormat.Compact.equals(options._outputFormat);
_outputSpace = outputSpace;
// FIXME: What's the proper way of getting the link here? _modelInfo._parameters._distribution is set to AUTO by default
_distribution = outputSpace ? (
_modelInfo._parameters.getDistributionFamily().equals(DistributionFamily.AUTO) && _output.isBinomialClassifier()
? DistributionFactory.getDistribution(DistributionFamily.bernoulli)
: DistributionFactory.getDistribution(_modelInfo._parameters)
): null;
}
@Override
protected void setupLocal() {
_mojo = new XGBoostJavaMojoModel(
_modelInfo._boosterBytes, _modelInfo.auxNodeWeightBytes(),
_output._names, _output._domains, _output.responseName(),
true
);
}
protected void fillInput(Chunk chks[], int row, double[] input) {
for (int i = 0; i < chks.length; i++) {
input[i] = chks[i].atd(row);
}
}
protected void addContribToNewChunk(double[] contribs, NewChunk[] nc) {
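// Contributions are computed in link space; when output space is requested they are rescaled by a
// common ratio so that they sum to the difference between the output-space prediction and the
// output-space background (bias) prediction.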
double transformationRatio = 1;
double biasTerm = contribs[contribs.length - 1];
if (_outputSpace) {
final double linkSpaceX = Arrays.stream(contribs).sum();
final double linkSpaceBg = biasTerm;
final double outSpaceX = _distribution.linkInv(linkSpaceX);
final double outSpaceBg = _distribution.linkInv(linkSpaceBg);
transformationRatio = Math.abs(linkSpaceX - linkSpaceBg) < 1e-6 ? 0 : (outSpaceX - outSpaceBg) / (linkSpaceX - linkSpaceBg);
biasTerm = outSpaceBg;
}
for (int i = 0; i < nc.length - 1; i++) {
nc[i].addNum(contribs[i] * transformationRatio);
}
nc[nc.length - 1].addNum(biasTerm);
}
@Override
protected void map(Chunk[] cs, Chunk[] bgCs, NewChunk[] ncs) {
MutableOneHotEncoderFVec rowFVec = new MutableOneHotEncoderFVec(_di, _output._sparse);
MutableOneHotEncoderFVec rowFBgVec = new MutableOneHotEncoderFVec(_di, _output._sparse);
double[] input = MemoryManager.malloc8d(cs.length);
double[] inputBg = MemoryManager.malloc8d(cs.length);
double[] contribs = MemoryManager.malloc8d(_outputAggregated ? ncs.length : _di.fullN() + 1);
for (int row = 0; row < cs[0]._len; row++) {
fillInput(cs, row, input);
rowFVec.setInput(input);
for (int bgRow = 0; bgRow < bgCs[0]._len; bgRow++) {
Arrays.fill(contribs, 0);
fillInput(bgCs, bgRow, inputBg);
rowFBgVec.setInput(inputBg);
// calculate Shapley values
_mojo.calculateInterventionalContributions(rowFVec, rowFBgVec, contribs, _outputAggregated ? _di._catOffsets : null, false);
// FIXME: This is questionable decision. It seems logical at first to assign the contribution to the level
// that was present in the data but since this is in the expanded features it might happen that the level
// now represented as one dimension is not used at all. To make it simpler to think about let's imagine
// GLM with category color x=red, b=blue. In GLM we can look and see that color.red has no importance (beta_{red} == 0) and
// the color.blue is the only important feature so the contribution is basically from switching blue
// from 1 to 0 not from blue to zero and red to one. Can we get such information in tree models?
// if (!_outputAggregated) {
// // make sure the contribution is on the level that's present in the foreground sample
// for (int i = 0; i < _di._catOffsets.length-1; i++) {
// final int fgIdx = Double.isNaN(input[i]) ? _di._catOffsets[i+1]-1 : _di._catOffsets[i] + (int)input[i];
// final int bgIdx = Double.isNaN(inputBg[i]) ? _di._catOffsets[i+1]-1 : _di._catOffsets[i] + (int)inputBg[i];
// contribs[fgIdx] += contribs[bgIdx];
// contribs[bgIdx] = 0;
// }
// }
addContribToNewChunk(contribs, ncs);
}
}
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/predict/PredictorFactory.java
|
package hex.tree.xgboost.predict;
import biz.k11i.xgboost.Predictor;
import biz.k11i.xgboost.config.PredictorConfiguration;
import biz.k11i.xgboost.tree.RegTree;
import biz.k11i.xgboost.tree.RegTreeFactory;
import biz.k11i.xgboost.util.ModelReader;
import hex.genmodel.algos.xgboost.XGBoostJavaMojoModel;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteOrder;
public class PredictorFactory {
public static Predictor makePredictor(byte[] boosterBytes) {
return makePredictor(boosterBytes, null, true);
}
public static Predictor makePredictor(byte[] boosterBytes, byte[] auxNodeWeightBytes, boolean scoringOnly) {
PredictorConfiguration.Builder bldr = PredictorConfiguration.builder();
if (scoringOnly && unsafeTreesSupported()) {
bldr.regTreeFactory(UnsafeRegTreeFactory.INSTANCE);
}
PredictorConfiguration cfg = bldr.build();
try (InputStream is = new ByteArrayInputStream(boosterBytes)) {
Predictor p = new Predictor(is, cfg);
if (!scoringOnly && auxNodeWeightBytes != null)
XGBoostJavaMojoModel.updateNodeWeights(p, auxNodeWeightBytes);
return p;
} catch (IOException e) {
throw new IllegalStateException(e);
}
}
static boolean unsafeTreesSupported() {
// XGBoost Predictor uses little-endian (LE) byte order; we can only use our unsafe scoring path if the system has the same endianness
return ByteOrder.nativeOrder().equals(ByteOrder.LITTLE_ENDIAN);
}
private static class UnsafeRegTreeFactory implements RegTreeFactory {
private static final UnsafeRegTreeFactory INSTANCE = new UnsafeRegTreeFactory();
@Override
public RegTree loadTree(ModelReader reader) throws IOException {
return new XGBoostRegTree(reader);
}
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/predict/UpdateAuxTreeWeightsTask.java
|
package hex.tree.xgboost.predict;
import biz.k11i.xgboost.Predictor;
import biz.k11i.xgboost.gbm.GBTree;
import biz.k11i.xgboost.tree.RegTree;
import biz.k11i.xgboost.tree.RegTreeNode;
import hex.*;
import hex.genmodel.utils.DistributionFamily;
import hex.genmodel.utils.LinkFunctionType;
import hex.tree.xgboost.XGBoostModelInfo;
import hex.tree.xgboost.XGBoostOutput;
import water.MRTask;
import water.fvec.Chunk;
import water.fvec.NewChunk;
import water.util.ArrayUtils;
public class UpdateAuxTreeWeightsTask extends MRTask<UpdateAuxTreeWeightsTask> {
// IN
private final DistributionFamily _dist;
private final Predictor _p;
private final DataInfo _di;
private final boolean _sparse;
// OUT
private double[/*treeId*/][/*leafNodeId*/] _nodeWeights;
public UpdateAuxTreeWeightsTask(DistributionFamily dist, DataInfo di, XGBoostModelInfo modelInfo, XGBoostOutput output) {
_dist = dist;
_p = PredictorFactory.makePredictor(modelInfo._boosterBytes, null, false);
_di = di;
_sparse = output._sparse;
if (_p.getNumClass() > 2) {
throw new UnsupportedOperationException("Updating tree weights is currently not supported for multinomial models.");
}
if (_dist != DistributionFamily.gaussian && _dist != DistributionFamily.bernoulli) {
throw new UnsupportedOperationException("Updating tree weights is currently not supported for distribution " + _dist + ".");
}
}
private double[][] initNodeWeights() {
GBTree gbTree = (GBTree) _p.getBooster();
RegTree[] trees = gbTree.getGroupedTrees()[0];
double[][] nodeWeights = new double[trees.length][];
for (int i = 0; i < trees.length; i++) {
nodeWeights[i] = new double[trees[i].getStats().length];
}
return nodeWeights;
}
@Override
public void map(Chunk[] chks, NewChunk[] idx) {
_nodeWeights = initNodeWeights();
LinkFunction logit = LinkFunctionFactory.getLinkFunction(LinkFunctionType.logit);
RegTree[] trees = ((GBTree) _p.getBooster()).getGroupedTrees()[0];
MutableOneHotEncoderFVec inputVec = new MutableOneHotEncoderFVec(_di, _sparse);
int inputLength = chks.length - 1;
int weightIndex = chks.length - 1;
double[] input = new double[inputLength];
for (int row = 0; row < chks[0]._len; row++) {
double weight = chks[weightIndex].atd(row);
if (weight == 0 || Double.isNaN(weight))
continue;
for (int i = 0; i < input.length; i++)
input[i] = chks[i].atd(row);
inputVec.setInput(input);
int ntrees = _nodeWeights.length;
int[] leafIdx = _p.getBooster().predictLeaf(inputVec, ntrees);
assert leafIdx.length == ntrees: "Leaf indices (#idx=" + leafIdx.length + ") were not returned for all trees (#trees=" + ntrees + ").";
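// For gaussian the hessian is constant, so leaf weights are plain sums of row weights; for bernoulli
// the hessian p*(1-p) is evaluated at the margin accumulated over the preceding trees.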
if (_dist == DistributionFamily.gaussian) {
for (int i = 0; i < leafIdx.length; i++) {
_nodeWeights[i][leafIdx[i]] += weight;
}
} else {
assert _dist == DistributionFamily.bernoulli;
double f = -_p.getBaseScore();
for (int i = 0; i < leafIdx.length; i++) {
RegTreeNode[] nodes = trees[i].getNodes();
double p = logit.linkInv(f);
double hessian = p * (1 - p);
_nodeWeights[i][leafIdx[i]] += weight * hessian;
f += nodes[leafIdx[i]].getLeafValue();
}
}
}
}
@Override
public void reduce(UpdateAuxTreeWeightsTask mrt) {
ArrayUtils.add(_nodeWeights, mrt._nodeWeights);
}
@Override
protected void postGlobal() {
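// Roll leaf weights up to internal nodes: nodes are visited in decreasing index order and a parent's
// index is always smaller than its children's, so each parent's weight (sum of its two children)
// is computed only after the children's weights are final.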
GBTree gbTree = (GBTree) _p.getBooster();
RegTree[] trees = gbTree.getGroupedTrees()[0];
for (int i = 0; i < trees.length; i++) {
RegTreeNode[] nodes = trees[i].getNodes();
for (int j = nodes.length - 1; j >= 0; j--) {
RegTreeNode node = nodes[j];
int parentId = node.getParentIndex();
if (parentId < 0)
continue;
assert parentId < j: "Broken tree #" + i + ". Tree rollups assume parentId (=" + parentId + ") < childId (=" + j + ").";
RegTreeNode parent = nodes[parentId];
_nodeWeights[i][parentId] = _nodeWeights[i][parent.getLeftChildIndex()] + _nodeWeights[i][parent.getRightChildIndex()];
}
}
}
public double[][] getNodeWeights() {
return _nodeWeights;
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/predict/XGBoostBigScorePredict.java
|
package hex.tree.xgboost.predict;
import hex.Model;
import water.fvec.Chunk;
import water.fvec.Frame;
public interface XGBoostBigScorePredict extends Model.BigScorePredict {
@Override
XGBoostPredict initMap(Frame fr, Chunk[] chks);
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/predict/XGBoostJavaBigScoreChunkPredict.java
|
package hex.tree.xgboost.predict;
import biz.k11i.xgboost.Predictor;
import hex.DataInfo;
import hex.Model;
import hex.genmodel.algos.xgboost.XGBoostMojoModel;
import hex.tree.xgboost.XGBoostModel;
import hex.tree.xgboost.XGBoostOutput;
import water.fvec.Chunk;
import water.fvec.Frame;
public class XGBoostJavaBigScoreChunkPredict implements XGBoostPredict, Model.BigScoreChunkPredict {
private final XGBoostOutput _output;
private final double _threshold;
private final Predictor _predictor;
private final MutableOneHotEncoderFVec _row;
private final int _offsetIndex;
private final boolean[] _usedColumns;
public XGBoostJavaBigScoreChunkPredict(
DataInfo di,
XGBoostOutput output,
XGBoostModel.XGBoostParameters parms,
double threshold,
Predictor predictor,
boolean[] usedColumns,
Frame data
) {
_output = output;
_threshold = threshold;
_predictor = predictor;
_row = new MutableOneHotEncoderFVec(di, _output._sparse);
_offsetIndex = data.find(parms._offset_column);
_usedColumns = usedColumns;
}
@Override
public double[] score0(Chunk[] chks, double offset, int row_in_chunk, double[] tmp, double[] preds) {
assert _output.nfeatures() == tmp.length;
for (int i = 0; i < tmp.length; i++) {
if (_usedColumns == null || _usedColumns[i]) {
tmp[i] = chks[i].atd(row_in_chunk);
}
}
_row.setInput(tmp);
float[] out;
if (_output.hasOffset()) {
out = _predictor.predict(_row, (float) offset);
} else if (offset != 0) {
throw new IllegalArgumentException("Model was not trained with offset_column, but offset != 0");
} else {
out = _predictor.predict(_row);
}
return XGBoostMojoModel.toPreds(tmp, out, preds, _output.nclasses(), _output._priorClassDist, _threshold);
}
public float[][] predict(Chunk[] cs) {
final float[][] preds = new float[cs[0]._len][];
final double[] tmp = new double[_output.nfeatures()];
for (int row = 0; row < cs[0]._len; row++) {
for (int col = 0; col < tmp.length; col++) {
if (_usedColumns == null || _usedColumns[col]) {
tmp[col] = cs[col].atd(row);
}
}
_row.setInput(tmp);
if (_offsetIndex >= 0) {
float offset = (float) cs[_offsetIndex].atd(row);
preds[row] = _predictor.predict(_row, offset);
} else {
preds[row] = _predictor.predict(_row);
}
}
return preds;
}
@Override
public void close() {}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/predict/XGBoostJavaBigScorePredict.java
|
package hex.tree.xgboost.predict;
import biz.k11i.xgboost.Predictor;
import biz.k11i.xgboost.gbm.GBTree;
import biz.k11i.xgboost.gbm.GradBooster;
import biz.k11i.xgboost.tree.RegTree;
import biz.k11i.xgboost.tree.RegTreeNode;
import hex.DataInfo;
import hex.tree.xgboost.XGBoostModel;
import hex.tree.xgboost.XGBoostOutput;
import hex.tree.xgboost.XGBoostModelInfo;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.util.ArrayUtils;
public class XGBoostJavaBigScorePredict implements XGBoostBigScorePredict {
private final DataInfo _di;
private final XGBoostOutput _output;
private final XGBoostModel.XGBoostParameters _parms;
private final double _threshold;
private final Predictor _predictor;
private final boolean[] _usedColumns;
public XGBoostJavaBigScorePredict(
XGBoostModelInfo model_info,
XGBoostOutput output,
DataInfo di,
XGBoostModel.XGBoostParameters parms, double threshold
) {
_di = di;
_output = output;
_parms = parms;
_threshold = threshold;
_predictor = PredictorFactory.makePredictor(model_info._boosterBytes);
_usedColumns = findUsedColumns(_predictor.getBooster(), di, _output.nfeatures());
}
@Override
public XGBoostPredict initMap(Frame fr, Chunk[] chks) {
return new XGBoostJavaBigScoreChunkPredict(_di, _output, _parms, _threshold, _predictor, _usedColumns, fr);
}
/**
* For each input feature decides whether it is used by the model or not.
*
* @param booster booster
* @param di data info
* @param nFeatures number of features provided at training time
* @return for each input feature, a flag indicating whether the model uses it; returns null if the used
*         features cannot be determined or the model uses all of them (to the caller, null effectively means "use everything")
*/
static boolean[] findUsedColumns(GradBooster booster, DataInfo di, final int nFeatures) {
if (! (booster instanceof GBTree)) {
return null;
}
int[] splitIndexToColumnIndex = di.coefOriginalColumnIndices();
assert ArrayUtils.maxValue(splitIndexToColumnIndex) < nFeatures; // this holds because feature columns go first, before special columns
boolean[] usedColumns = new boolean[nFeatures];
int usedCount = 0;
for (RegTree[] trees : ((GBTree) booster).getGroupedTrees()) {
for (RegTree tree : trees) {
for (RegTreeNode node : tree.getNodes()) {
if (node.isLeaf()) {
continue;
}
int column = splitIndexToColumnIndex[node.getSplitIndex()];
if (!usedColumns[column]) {
usedCount++;
usedColumns[column] = true;
if (splitIndexToColumnIndex.length == usedCount) { // all columns already used, abort
return null;
}
}
}
}
}
return usedColumns;
}
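// Illustrative example (added for clarity, not part of the original source): for a model trained
// on 4 feature columns that only ever splits on columns 0 and 2, this method would return
// {true, false, true, false}; callers such as score0()/predict() then skip reading chunk values
// for the unused columns. A null result means "read every column".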
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/predict/XGBoostJavaVariableImportance.java
|
package hex.tree.xgboost.predict;
import biz.k11i.xgboost.Predictor;
import biz.k11i.xgboost.gbm.GBTree;
import biz.k11i.xgboost.tree.RegTree;
import biz.k11i.xgboost.tree.RegTreeNode;
import biz.k11i.xgboost.tree.RegTreeNodeStat;
import hex.tree.xgboost.XGBoostModelInfo;
import hex.tree.xgboost.XGBoostUtils;
import hex.tree.xgboost.util.FeatureScore;
import java.util.HashMap;
import java.util.Map;
public class XGBoostJavaVariableImportance implements XGBoostVariableImportance {
private final String[] _featureNames;
public XGBoostJavaVariableImportance(XGBoostModelInfo modelInfo) {
_featureNames = XGBoostUtils.assembleFeatureNames(modelInfo.dataInfo())._names;
}
@Override
public Map<String, FeatureScore> getFeatureScores(byte[] boosterBytes) {
Predictor predictor = PredictorFactory.makePredictor(boosterBytes, null, false);
Map<String, FeatureScore> featureScore = new HashMap<>();
if (!(predictor.getBooster() instanceof GBTree)) {
return featureScore;
}
GBTree gbm = (GBTree) predictor.getBooster();
final RegTree[][] trees = gbm.getGroupedTrees();
for (final RegTree[] treeGroup : trees) {
for (int j = 0; j < treeGroup.length; j++) {
RegTree t = treeGroup[j];
for (int k = 0; k < t.getNodes().length; k++) {
RegTreeNode node = t.getNodes()[k];
if (node.isLeaf()) continue;
RegTreeNodeStat stat = t.getStats()[k];
FeatureScore fs = new FeatureScore();
fs._gain = stat.getGain();
fs._cover = stat.getCover();
final String fid = _featureNames[node.getSplitIndex()];
if (featureScore.containsKey(fid)) {
featureScore.get(fid).add(fs);
} else {
featureScore.put(fid, fs);
}
}
}
}
return featureScore;
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/predict/XGBoostModelMetrics.java
|
package hex.tree.xgboost.predict;
import hex.Model;
import hex.ModelMetrics;
import hex.tree.xgboost.XGBoostModel;
import hex.tree.xgboost.XGBoostOutput;
import hex.tree.xgboost.task.XGBoostScoreTask;
import water.Scope;
import water.fvec.Frame;
import water.fvec.Vec;
import water.udf.CFuncRef;
import java.util.Arrays;
import static water.fvec.Vec.T_CAT;
import static water.fvec.Vec.T_NUM;
public class XGBoostModelMetrics {
private final XGBoostOutput _output;
private final Frame _data;
private final Frame _originalData;
private final XGBoostModel _model;
private final XGBoostScoreTask _task;
public XGBoostModelMetrics(
XGBoostOutput output,
Frame data,
Frame originalData,
boolean isTrain,
XGBoostModel model,
CFuncRef customMetricFunc
) {
_output = output;
_data = data;
_originalData = originalData;
_model = model;
_task = new XGBoostScoreTask(
_output, _data.find(_model._parms._weights_column), isTrain, _model, customMetricFunc
);
}
public ModelMetrics compute() {
Scope.enter();
try {
Frame preds = Scope.track(runScoreTask());
if (_output.nclasses() == 1) {
Vec pred = preds.vec(0);
return _task._metricBuilder.makeModelMetrics(_model, _originalData, _data, new Frame(pred));
} else if (_output.nclasses() == 2) {
Vec p1 = preds.vec(2);
return _task._metricBuilder.makeModelMetrics(_model, _originalData, _data, new Frame(p1));
} else {
Frame pp = new Frame(preds);
pp.remove(0);
return _task._metricBuilder.makeModelMetrics(_model, _originalData, _data, pp);
}
} finally {
Scope.exit();
}
}
private Frame runScoreTask() {
_task.doAll(outputTypes(), _data);
final String[] names = Model.makeScoringNames(_output);
return _task.outputFrame(null, names, makeDomains(names));
}
private byte[] outputTypes() {
// Last output is the response, which eventually will be removed before returning the preds Frame but is needed to build metrics
if (_output.nclasses() == 1) {
return new byte[]{T_NUM};
} else if (_output.nclasses() == 2) {
return new byte[]{T_CAT, T_NUM, T_NUM};
} else {
byte[] types = new byte[_output.nclasses() + 1];
Arrays.fill(types, T_NUM);
return types;
}
}
private String[][] makeDomains(String[] names) {
if (_output.nclasses() == 1) {
return null;
} else {
String[][] domains = new String[names.length][];
domains[0] = _output.classNames();
return domains;
}
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/predict/XGBoostNativeBigScoreChunkPredict.java
|
package hex.tree.xgboost.predict;
import hex.DataInfo;
import hex.Model;
import hex.genmodel.algos.xgboost.XGBoostMojoModel;
import hex.tree.xgboost.*;
import hex.tree.xgboost.util.BoosterHelper;
import ai.h2o.xgboost4j.java.Booster;
import ai.h2o.xgboost4j.java.DMatrix;
import ai.h2o.xgboost4j.java.Rabit;
import ai.h2o.xgboost4j.java.XGBoostError;
import org.apache.log4j.Logger;
import water.fvec.Chunk;
import water.fvec.Frame;
import java.util.HashMap;
public class XGBoostNativeBigScoreChunkPredict implements XGBoostPredictContrib, Model.BigScoreChunkPredict {
private static final Logger LOG = Logger.getLogger(XGBoostNativeBigScoreChunkPredict.class);
private final double _threshold;
private final int _responseIndex;
private final int _offsetIndex;
private final XGBoostModelInfo _modelInfo;
private final XGBoostModel.XGBoostParameters _parms;
private final DataInfo _dataInfo;
private final BoosterParms _boosterParms;
private final XGBoostOutput _output;
private final float[][] _preds;
public XGBoostNativeBigScoreChunkPredict(
XGBoostModelInfo modelInfo,
XGBoostModel.XGBoostParameters parms,
DataInfo di,
BoosterParms boosterParms,
double threshold,
XGBoostOutput output,
Frame fr,
Chunk[] chks
) {
_modelInfo = modelInfo;
_parms = parms;
_dataInfo = di;
_boosterParms = boosterParms;
_threshold = threshold;
_output = output;
if (fr.vec(_parms._response_column).isBad()) {
_responseIndex = -1;
} else {
_responseIndex = fr.find(_parms._response_column);
}
_offsetIndex = fr.find(_parms._offset_column);
_preds = scoreChunk(chks, XGBoostPredict.OutputType.PREDICT);
}
@Override
public double[] score0(Chunk[] chks, double offset, int row_in_chunk, double[] tmp, double[] preds) {
for (int i = 0; i < tmp.length; i++) {
tmp[i] = chks[i].atd(row_in_chunk);
}
return XGBoostMojoModel.toPreds(tmp, _preds[row_in_chunk], preds, _output.nclasses(), null, _threshold);
}
@Override
public float[][] predictContrib(Chunk[] cs) {
return scoreChunk(cs, OutputType.PREDICT_CONTRIB_APPROX);
}
@Override
public float[][] predict(Chunk[] cs) {
return scoreChunk(cs, OutputType.PREDICT);
}
private float[][] scoreChunk(final Chunk[] cs, final XGBoostPredict.OutputType outputType) {
DMatrix data = null;
Booster booster = null;
try {
// Rabit has to be initialized as parts of booster.predict() are using Rabit
// This might be fixed in future versions of XGBoost
Rabit.init(new HashMap<>());
data = XGBoostUtils.convertChunksToDMatrix(
_dataInfo,
cs,
_responseIndex,
_output._sparse,
_offsetIndex
);
// No local chunks for this frame
if (data.rowNum() == 0) {
return new float[0][];
}
// Initialize Booster
booster = BoosterHelper.loadModel(_modelInfo._boosterBytes);
booster.setParams(_boosterParms.get());
int treeLimit = 0;
// Predict
float[][] preds;
switch (outputType) {
case PREDICT:
preds = booster.predict(data, false, treeLimit);
break;
case PREDICT_CONTRIB_APPROX:
preds = booster.predictContrib(data, treeLimit);
break;
default:
throw new UnsupportedOperationException("Unsupported output type: " + outputType);
}
return preds == null ? new float[0][] : preds;
} catch (XGBoostError xgBoostError) {
throw new IllegalStateException("Failed to score with XGBoost.", xgBoostError);
} finally {
BoosterHelper.dispose(booster, data);
try {
Rabit.shutdown();
} catch (XGBoostError xgBoostError) {
LOG.error("Failed Rabit shutdown. A hanging RabitTracker task might be present on the driver node.", xgBoostError);
}
}
}
@Override
public void close() {
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/predict/XGBoostNativeBigScorePredict.java
|
package hex.tree.xgboost.predict;
import hex.DataInfo;
import hex.tree.xgboost.BoosterParms;
import hex.tree.xgboost.XGBoostModel;
import hex.tree.xgboost.XGBoostModelInfo;
import hex.tree.xgboost.XGBoostOutput;
import water.fvec.Chunk;
import water.fvec.Frame;
public class XGBoostNativeBigScorePredict implements XGBoostBigScorePredict {
private final XGBoostModelInfo _modelInfo;
private final XGBoostModel.XGBoostParameters _parms;
private final XGBoostOutput _output;
private final DataInfo _dataInfo;
private final BoosterParms _boosterParms;
private final double _threshold;
public XGBoostNativeBigScorePredict(
XGBoostModelInfo modelInfo, XGBoostModel.XGBoostParameters parms,
XGBoostOutput output, DataInfo dataInfo, BoosterParms boosterParms,
double threshold
) {
_modelInfo = modelInfo;
_parms = parms;
_output = output;
_dataInfo = dataInfo;
_boosterParms = boosterParms;
_threshold = threshold;
}
@Override
public XGBoostPredict initMap(Frame fr, Chunk[] chks) {
return new XGBoostNativeBigScoreChunkPredict(_modelInfo, _parms, _dataInfo, _boosterParms, _threshold, _output, fr, chks);
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/predict/XGBoostNativeVariableImportance.java
|
package hex.tree.xgboost.predict;
import hex.tree.xgboost.util.BoosterHelper;
import hex.tree.xgboost.util.FeatureScore;
import ai.h2o.xgboost4j.java.Booster;
import ai.h2o.xgboost4j.java.XGBoostError;
import org.apache.log4j.Logger;
import water.Key;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.util.HashMap;
import java.util.Map;
public class XGBoostNativeVariableImportance implements XGBoostVariableImportance {
private static final Logger LOG = Logger.getLogger(XGBoostNativeVariableImportance.class);
private final File featureMapFile;
public XGBoostNativeVariableImportance(Key modelKey, String featureMap) {
featureMapFile = createFeatureMapFile(modelKey, featureMap);
}
private File createFeatureMapFile(Key modelKey, String featureMap) {
try {
File fmFile = Files.createTempFile("h2o_xgb_" + modelKey.toString(), ".txt").toFile();
fmFile.deleteOnExit();
try (OutputStream os = new FileOutputStream(fmFile)) {
os.write(featureMap.getBytes());
}
return fmFile;
} catch (IOException e) {
throw new RuntimeException("Cannot generate feature map file", e);
}
}
@Override
public void cleanup() {
if (featureMapFile != null) {
if (!featureMapFile.delete()) {
LOG.warn("Unable to delete file " + featureMapFile + ". Please do a manual clean-up.");
}
}
}
public Map<String, FeatureScore> getFeatureScores(byte[] boosterBytes) {
Booster booster = null;
try {
booster = BoosterHelper.loadModel(boosterBytes);
return BoosterHelper.doWithLocalRabit(new BoosterHelper.BoosterOp<Map<String, FeatureScore>>() {
@Override
public Map<String, FeatureScore> apply(Booster booster) throws XGBoostError {
String fmPath = featureMapFile.getAbsolutePath();
final String[] modelDump = booster.getModelDump(fmPath, true);
return parseFeatureScores(modelDump);
}
}, booster);
} catch (XGBoostError e) {
throw new RuntimeException("Failed getting feature scores.", e);
} finally {
if (booster != null)
BoosterHelper.dispose(booster);
}
}
public static Map<String, FeatureScore> parseFeatureScores(String[] modelDump) {
Map<String, FeatureScore> featureScore = new HashMap<>();
for (String tree : modelDump) {
for (String node : tree.split("\n")) {
String[] array = node.split("\\[", 2);
if (array.length < 2)
continue;
String[] content = array[1].split("\\]", 2);
if (content.length < 2)
continue;
String fid = content[0].split("<")[0];
FeatureScore fs = new FeatureScore();
String[] keyValues = content[1].split(",");
for (String keyValue : keyValues) {
if (keyValue.startsWith(FeatureScore.GAIN_KEY + "=")) {
fs._gain = Float.parseFloat(keyValue.substring(FeatureScore.GAIN_KEY.length() + 1));
} else if (keyValue.startsWith(FeatureScore.COVER_KEY + "=")) {
fs._cover = Float.parseFloat(keyValue.substring(FeatureScore.COVER_KEY.length() + 1));
}
}
if (featureScore.containsKey(fid)) {
featureScore.get(fid).add(fs);
} else {
featureScore.put(fid, fs);
}
}
}
return featureScore;
}
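// Minimal runnable sketch (added for illustration, not part of the original source). It feeds
// parseFeatureScores() a hand-written tree dump in the text format produced by
// booster.getModelDump(featureMap, true); the stat key names come from FeatureScore.GAIN_KEY and
// FeatureScore.COVER_KEY, which are assumed here to be "gain" and "cover".
public static void main(String[] args) {
String tree =
"0:[f1<0.5] yes=1,no=2,missing=1,gain=10.5,cover=100\n" +
"\t1:leaf=0.1,cover=60\n" + // leaf lines contain no '[' and are skipped by the parser
"\t2:[f1<1.5] yes=3,no=4,missing=3,gain=4.5,cover=40\n";
Map<String, FeatureScore> scores = parseFeatureScores(new String[]{tree});
// "f1" appears in two split nodes, so its gain and cover are accumulated via FeatureScore.add()
System.out.println(scores.keySet());
}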
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/predict/XGBoostPredict.java
|
package hex.tree.xgboost.predict;
import hex.Model;
import water.fvec.Chunk;
public interface XGBoostPredict extends Model.BigScoreChunkPredict {
enum OutputType {PREDICT, PREDICT_CONTRIB_APPROX}
float[][] predict(final Chunk[] cs);
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/predict/XGBoostPredictContrib.java
|
package hex.tree.xgboost.predict;
import water.fvec.Chunk;
public interface XGBoostPredictContrib extends XGBoostPredict {
float[][] predictContrib(final Chunk[] cs);
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/predict/XGBoostRegTree.java
|
package hex.tree.xgboost.predict;
import biz.k11i.xgboost.tree.*;
import biz.k11i.xgboost.util.FVec;
import biz.k11i.xgboost.util.ModelReader;
import water.util.UnsafeUtils;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
/**
* Regression tree.
*/
public class XGBoostRegTree implements RegTree {
private static final int NODE_SIZE = 20;
private static final int STATS_SIZE = 16;
private byte[] _nodes;
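// Assumed binary node layout, matching the offsets used in getLeafValue() below (each node occupies
// NODE_SIZE = 20 bytes): an int at offset 0 that is not read here (assumed to be the parent index),
// int cleft (offset 4), int cright (offset 8), int sindex (offset 12, split feature index with the
// "default left" flag in the highest bit), and a float split condition / leaf value (offset 16).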
/**
* Loads model from stream.
*
* @param reader input stream
* @throws IOException If an I/O error occurs
*/
XGBoostRegTree(ModelReader reader) throws IOException {
final int numNodes = readNumNodes(reader);
_nodes = reader.readByteArray(numNodes * NODE_SIZE);
reader.skip((long) numNodes * STATS_SIZE);
}
@Override
public int getLeafIndex(FVec feat) {
throw new UnsupportedOperationException("Leaf node id assignment is currently not supported");
}
@Override
public void getLeafPath(FVec fVec, StringBuilder stringBuilder) {
throw new UnsupportedOperationException("Leaf node id assignment is currently not supported");
}
/**
* Retrieves nodes from root to leaf and returns leaf value.
*
* @param feat feature vector
* @param root_id starting root index
* @return leaf value
*/
@Override
public final float getLeafValue(FVec feat, int root_id) {
int pid = root_id;
int pos = pid * NODE_SIZE + 4;
int cleft_ = UnsafeUtils.get4(_nodes, pos);
while (cleft_ != -1) {
final int sindex_ = UnsafeUtils.get4(_nodes, pos + 8);
final float fvalue = feat.fvalue((int) (sindex_ & ((1L << 31) - 1L)));
if (Float.isNaN(fvalue)) {
pid = (sindex_ >>> 31) != 0 ? cleft_ : UnsafeUtils.get4(_nodes, pos + 4);
} else {
final float value_ = UnsafeUtils.get4f(_nodes, pos + 12);
pid = (fvalue < value_) ? cleft_ : UnsafeUtils.get4(_nodes, pos + 4);
}
pos = pid * NODE_SIZE + 4;
cleft_ = UnsafeUtils.get4(_nodes, pos);
}
return UnsafeUtils.get4f(_nodes, pos + 12);
}
@Override
public RegTreeNode[] getNodes() {
try (InputStream nodesStream = new ByteArrayInputStream(_nodes)) {
ModelReader reader = new ModelReader(nodesStream);
RegTreeNode[] nodes = new RegTreeNode[_nodes.length / NODE_SIZE];
for (int i = 0; i < nodes.length; i++) {
nodes[i] = NodeHelper.read(reader);
}
return nodes;
} catch (IOException e) {
throw new RuntimeException("Cannot extract nodes from tree", e);
}
}
@Override
public RegTreeNodeStat[] getStats() {
throw new UnsupportedOperationException("Scoring-optimized trees don't contain node stats");
}
private static int readNumNodes(ModelReader reader) throws IOException {
int numRoots = reader.readInt();
assert numRoots == 1;
int numNodes = reader.readInt();
reader.skip(4 * 4 + 31 * 4); // skip {int num_deleted, int max_depth, int num_feature, size_leaf_vector, 31 * reserved int}
return numNodes;
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/predict/XGBoostVariableImportance.java
|
package hex.tree.xgboost.predict;
import hex.tree.xgboost.util.FeatureScore;
import java.util.Map;
public interface XGBoostVariableImportance {
Map<String, FeatureScore> getFeatureScores(byte[] boosterBytes);
default void cleanup() {}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/rabit/RabitTrackerH2O.java
|
package hex.tree.xgboost.rabit;
import hex.tree.xgboost.rabit.util.LinkMap;
import ai.h2o.xgboost4j.java.IRabitTracker;
import org.apache.log4j.Logger;
import water.H2O;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.channels.ServerSocketChannel;
import java.nio.channels.SocketChannel;
import java.util.*;
public class RabitTrackerH2O implements IRabitTracker {
private static final Logger LOG = Logger.getLogger(RabitTrackerH2O.class);
public static final int MAGIC = 0xff99;
private ServerSocketChannel sock;
private int port = 9091;
private final int workers;
private final Map<String, String> envs = new HashMap<>();
private volatile RabitTrackerH2OThread trackerThread;
public RabitTrackerH2O(int workers) {
if (workers < 1) {
throw new IllegalStateException("workers must be greater than or equal to one (1).");
}
this.workers = workers;
}
@Override
public Map<String, String> getWorkerEnvs() {
envs.put("DMLC_NUM_WORKER", String.valueOf(workers));
envs.put("DMLC_NUM_SERVER", "0");
envs.put("DMLC_TRACKER_URI", H2O.SELF_ADDRESS.getHostAddress());
envs.put("DMLC_TRACKER_PORT", Integer.toString(port));
envs.put("DMLC_TASK_ID", Integer.toString(H2O.SELF.index()));
envs.put("DMLC_WORKER_STOP_PROCESS_ON_ERROR", "false");
envs.put("rabit_world_size", Integer.toString(workers));
return envs;
}
@Override
public boolean start(long timeout) {
boolean tryToBind = true;
while (tryToBind) {
try {
this.sock = ServerSocketChannel.open();
this.sock.socket().setReceiveBufferSize(64 * 1024);
InetSocketAddress isa = new InetSocketAddress(H2O.SELF_ADDRESS, this.port);
this.sock.socket().bind(isa);
LOG.debug("Started listening on port " + this.port);
tryToBind = false;
} catch (java.io.IOException e) {
this.port++;
try {
this.sock.close();
} catch (IOException socketCloseException) {
LOG.warn("Failed to close Rabit Tracker socket on port " + sock.socket().getLocalPort());
}
if (this.port > 9999) {
throw new RuntimeException("Failed to bind Rabit tracker to a socket in range 9091-9999", e);
}
}
}
if (null != this.trackerThread) {
throw new IllegalStateException("Rabit tracker already started.");
}
this.trackerThread = new RabitTrackerH2OThread(this);
this.trackerThread.start();
return true;
}
@Override
public void stop() {
assert this.trackerThread != null;
try {
this.trackerThread.interrupt();
} catch (SecurityException e){
LOG.error("Could not interrupt a thread in RabitTrackerH2O: " + trackerThread.toString());
}
this.trackerThread.terminateSocketChannels();
this.trackerThread = null;
try {
this.sock.close();
this.port = 9091;
} catch (IOException e) {
LOG.error("Failed to close Rabit tracker socket.", e);
}
}
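// Typical lifecycle (illustrative only, not part of the original source):
//   IRabitTracker tracker = new RabitTrackerH2O(nWorkers);
//   tracker.start(0);                                   // binds the first free port in 9091-9999
//   Map<String, String> envs = tracker.getWorkerEnvs(); // passed to every Rabit worker
//   ... distributed XGBoost training runs, workers connect to the tracker ...
//   tracker.waitFor(timeoutMs);                         // returns once all workers have sent "shutdown"
//   tracker.stop();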
private static class RabitTrackerH2OThread extends Thread {
private final RabitTrackerH2O tracker;
private final Map<String, Integer> jobToRankMap = new HashMap<>();
private final List<SocketChannel> socketChannels = new ArrayList<>();
private RabitTrackerH2OThread(RabitTrackerH2O tracker) {
setPriority(MAX_PRIORITY-1);
this.setName("TCP-" + tracker.sock);
this.tracker = tracker;
}
private void terminateSocketChannels(){
for (SocketChannel channel : socketChannels) {
try {
channel.close();
} catch (IOException e) {
LOG.warn("Unable to close RabitTracerH2O SocketChannel on port " + channel.socket().getPort());
}
}
}
private static final String PRINT_CMD = "print";
private static final String SHUTDOWN_CMD = "shutdown";
private static final String START_CMD = "start";
private static final String RECOVER_CMD = "recover";
private static final String NULL_STR = "null";
@Override
public void run() {
LOG.debug("Rabit tracker started on port " + tracker.port);
LinkMap linkMap = null;
Set<Integer> shutdown = new HashSet<>();
Map<Integer, RabitWorker> waitConn = new HashMap<>();
List<RabitWorker> pending = new ArrayList<>();
Queue<Integer> todoNodes = new ArrayDeque<>(tracker.workers);
while (!interrupted() && shutdown.size() != tracker.workers) {
try{
final SocketChannel channel = tracker.sock.accept(); // Does not proceed when interrupt() is called.
socketChannels.add(channel);
final RabitWorker worker = new RabitWorker(channel);
if (PRINT_CMD.equals(worker.cmd)) {
String msg = worker.receiver().getStr();
LOG.warn("Rabit worker: " + msg);
continue;
} else if (SHUTDOWN_CMD.equals(worker.cmd)) {
assert worker.rank >= 0 && !shutdown.contains(worker.rank);
assert !waitConn.containsKey(worker.rank);
shutdown.add(worker.rank);
channel.socket().close();
LOG.debug("Received " + worker.cmd + " signal from " + worker.rank);
continue;
}
assert START_CMD.equals(worker.cmd) || RECOVER_CMD.equals(worker.cmd);
if (null == linkMap) {
assert START_CMD.equals(worker.cmd);
linkMap = new LinkMap(tracker.workers);
for (int i = 0; i < tracker.workers; i++) {
todoNodes.add(i);
}
} else {
assert worker.worldSize == -1 || worker.worldSize == tracker.workers;
}
if (RECOVER_CMD.equals(worker.cmd)) {
assert worker.rank >= 0;
}
int rank = worker.decideRank(jobToRankMap);
if (-1 == rank) {
assert todoNodes.size() != 0;
pending.add(worker);
if (pending.size() == todoNodes.size()) {
Collections.sort(pending);
for (RabitWorker p : pending) {
rank = todoNodes.poll();
if (!NULL_STR.equals(p.jobId)) {
jobToRankMap.put(p.jobId, rank);
}
p.assignRank(rank, waitConn, linkMap);
if (p.waitAccept > 0) {
waitConn.put(rank, p);
}
LOG.debug("Received " + p.cmd +
" signal from " + p.host + ":" + p.workerPort +
". Assigned rank " + p.rank
);
}
}
if (todoNodes.isEmpty()) {
LOG.debug("All " + tracker.workers + " Rabit workers are getting started.");
}
} else {
worker.assignRank(rank, waitConn, linkMap);
if (worker.waitAccept > 0) {
waitConn.put(rank, worker);
}
}
} catch (IOException e) {
LOG.error("Exception in Rabit tracker.", e);
}
}
LOG.debug("All Rabit nodes finished.");
}
}
@Override
public int waitFor(long timeout) {
while(null != this.trackerThread && this.trackerThread.isAlive()) {
try {
this.trackerThread.join(timeout);
} catch (InterruptedException e) {
LOG.debug("Rabit tracker thread got suddenly interrupted.", e);
Thread.currentThread().interrupt();
}
}
return 0;
}
@Override
public void uncaughtException(Thread t, Throwable e) {
LOG.error("Uncaught exception occurred on Rabit tracker thread " + t.getName(), e);
stop();
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/rabit/RabitWorker.java
|
package hex.tree.xgboost.rabit;
import hex.tree.xgboost.rabit.communication.XGBoostAutoBuffer;
import hex.tree.xgboost.rabit.util.LinkMap;
import org.apache.log4j.Logger;
import water.AutoBuffer;
import java.io.IOException;
import java.nio.channels.SocketChannel;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class RabitWorker implements Comparable<RabitWorker> {
private static final Logger LOG = Logger.getLogger(RabitWorker.class);
final String host;
final int workerPort;
private SocketChannel socket;
int rank;
int worldSize;
String jobId;
public String cmd;
int waitAccept;
private int port;
private XGBoostAutoBuffer ab;
private XGBoostAutoBuffer writerAB;
RabitWorker(SocketChannel channel) throws IOException {
this.ab = new XGBoostAutoBuffer(channel);
this.socket = channel;
this.host = channel.socket().getInetAddress().getHostAddress();
this.workerPort = channel.socket().getPort();
int magicReceived = ab.get4();
if(RabitTrackerH2O.MAGIC != magicReceived) {
throw new IllegalStateException(
"Tracker received wrong magic number ["
+ magicReceived +
"] from host " + this.host
);
}
writerAB = new XGBoostAutoBuffer();
writerAB.put4(RabitTrackerH2O.MAGIC);
AutoBuffer.writeToChannel(writerAB.buffer(), socket);
this.rank = ab.get4();
this.worldSize = ab.get4();
this.jobId = safeLowercase(ab.getStr());
this.cmd = safeLowercase(ab.getStr());
this.waitAccept = 0;
this.port = -1;
LOG.debug("Initialized worker " + this.host + " with rank " + this.rank + " and command [" + this.cmd + "].");
}
private String safeLowercase(String str) {
return null == str ? null : str.toLowerCase();
}
int decideRank(Map<String, Integer> jobToRankMap) {
if (rank >= 0) {
return rank;
}
if (!"null".equals(jobId) && jobToRankMap.containsKey(jobId)) {
return jobToRankMap.get(jobId);
}
return -1;
}
public XGBoostAutoBuffer receiver() {
return ab;
}
public void assignRank(int rank, Map<Integer, RabitWorker> waitConn, LinkMap linkMap) throws IOException {
this.rank = rank;
List<Integer> nnset = linkMap.treeMap.get(rank);
Integer rprev = linkMap.ringMap.get(rank)._1();
Integer rnext = linkMap.ringMap.get(rank)._2();
writerAB.put4(rank);
writerAB.put4(linkMap.parentMap.get(rank));
writerAB.put4(linkMap.treeMap.size());
writerAB.put4(nnset.size());
for (Integer r : nnset) {
writerAB.put4(r);
}
if (rprev != -1 && rprev != rank) {
nnset.add(rprev);
writerAB.put4(rprev);
} else {
writerAB.put4(-1);
}
if (rnext != -1 && rnext != rank) {
nnset.add(rnext);
writerAB.put4(rnext);
} else {
writerAB.put4(-1);
}
AutoBuffer.writeToChannel(writerAB.buffer(), socket);
while (true) {
int ngood = ab.get4();
Set<Integer> goodSet = new LinkedHashSet<>();
for(int i = 0; i < ngood; i++) {
int got = ab.get4();
goodSet.add(got);
}
assert nnset.containsAll(goodSet);
Set<Integer> badSet = new LinkedHashSet<>(nnset);
badSet.removeAll(goodSet);
Set<Integer> conset = new LinkedHashSet<>();
for (Integer r : badSet) {
if(waitConn.containsKey(r)) {
conset.add(r);
}
}
writerAB.put4(conset.size());
AutoBuffer.writeToChannel(writerAB.buffer(), socket);
writerAB.put4(badSet.size() - conset.size());
AutoBuffer.writeToChannel(writerAB.buffer(), socket);
for (Integer r : conset) {
writerAB.putStr(waitConn.get(r).host);
writerAB.put4(waitConn.get(r).port);
writerAB.put4(r);
AutoBuffer.writeToChannel(writerAB.buffer(), socket);
}
int nerr = ab.get4();
if(nerr != 0) {
continue;
}
this.port = ab.get4();
Set<Integer> rmset = new LinkedHashSet<>();
// All connections were successfully set up
for (Integer r : conset) {
waitConn.get(r).waitAccept -= 1;
if(waitConn.get(r).waitAccept == 0) {
rmset.add(r);
}
}
for (Integer r : rmset) {
waitConn.remove(r);
}
this.waitAccept = badSet.size() - conset.size();
return;
}
}
@Override
public int compareTo(RabitWorker o) {
return jobId.compareTo(o.jobId);
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/rabit
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/rabit/communication/XGBoostAutoBuffer.java
|
package hex.tree.xgboost.rabit.communication;
import water.AutoBuffer;
import water.util.StringUtils;
import java.io.IOException;
import java.nio.channels.SocketChannel;
import java.nio.charset.Charset;
// Wrapper class for communication with XGBoost cluster. XGBoost cluster does not use int compression
// when sending/receiving string length. Other basic functionality seems to be the same.
public class XGBoostAutoBuffer {
private AutoBuffer ab;
private static final Charset UTF_8 = Charset.forName("UTF-8");
public XGBoostAutoBuffer(SocketChannel channel) throws IOException {
this.ab = new AutoBuffer(channel);
}
public XGBoostAutoBuffer() {
this.ab = new AutoBuffer();
}
// Used to communicate with external frameworks, for example XGBoost
public String getStr( ) {
int len = ab.get4();
return len == -1 ? null : new String(ab.getA1(len), UTF_8);
}
// Used to communicate with external frameworks, for example XGBoost
public XGBoostAutoBuffer putStr(String s ) {
ab.put4(s.length());
byte[] a = StringUtils.bytesOf(s);
ab.putA1(a, a.length);
return this;
}
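// Wire format sketch (illustrative): putStr("start") writes the 4-byte int 5 followed by the UTF-8
// bytes of "start"; getStr() reads the length first and then that many bytes, with a length of -1
// denoting a null string. Note the length prefix is the Java char count, which equals the byte
// count only for ASCII strings, as is the case for the Rabit commands and job ids exchanged here.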
public int get4() {
return ab.get4();
}
public void put4(int number) {
ab.put4(number);
}
public AutoBuffer buffer() {
return ab;
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/rabit
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/rabit/util/AssignedRank.java
|
package hex.tree.xgboost.rabit.util;
import water.util.Pair;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.List;
/**
* Java implementation of ai.h2o.xgboost4j.scala.rabit.util.AssignedRank
*/
public class AssignedRank {
private int rank;
private List<Integer> neighbours;
private Pair<Integer, Integer> ring;
private int parent;
public AssignedRank(int rank, List<Integer> neighbours, Pair<Integer, Integer> ring, int parent) {
this.rank = rank;
this.neighbours = neighbours;
this.ring = ring;
this.parent = parent;
}
public ByteBuffer toByteBuffer(int worldSize) {
ByteBuffer buffer = ByteBuffer.allocate(4 * (neighbours.size() + 6)).order(ByteOrder.nativeOrder());
buffer.putInt(rank).putInt(parent).putInt(worldSize).putInt(neighbours.size());
// neighbors in tree structure
for(Integer n : neighbours) {
buffer.putInt(n);
}
buffer.putInt((ring._1() != -1 && ring._1() != rank) ? ring._1() : -1);
buffer.putInt((ring._2() != -1 && ring._2() != rank) ? ring._2() : -1);
buffer.flip();
return buffer;
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/rabit
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/rabit/util/LinkMap.java
|
package hex.tree.xgboost.rabit.util;
import water.util.Pair;
import java.util.*;
/**
* Java implementation of ai.h2o.xgboost4j.scala.rabit.util.LinkMap
*
* Naming left for consistency. In reality this is a simple binary tree data structure, which is used for communication
* between Rabit workers.
*
*/
public class LinkMap {
private int numWorkers;
public Map<Integer, List<Integer>> treeMap = new LinkedHashMap<>();
public Map<Integer, Integer> parentMap = new LinkedHashMap<>();
public Map<Integer, Pair<Integer, Integer>> ringMap = new LinkedHashMap<>();
public LinkMap(int numWorkers) {
this.numWorkers = numWorkers;
Map<Integer, List<Integer>> treeMap_ = initTreeMap();
Map<Integer, Integer> parentMap_ = initParentMap();
Map<Integer, Pair<Integer, Integer>> ringMap_ = constructRingMap(treeMap_, parentMap_);
Map<Integer, Integer> rMap_ = new LinkedHashMap<>(numWorkers - 1);
rMap_.put(0, 0);
int k = 0;
for(int i = 0; i < numWorkers - 1; i++) {
int kNext = ringMap_.get(k)._2();
k = kNext;
rMap_.put(kNext, (i + 1));
}
for (Map.Entry<Integer, Pair<Integer, Integer>> kv : ringMap_.entrySet()) {
this.ringMap.put(
rMap_.get(kv.getKey()),
new Pair<>(rMap_.get(kv.getValue()._1()), rMap_.get(kv.getValue()._2()))
);
}
for (Map.Entry<Integer, List<Integer>> kv : treeMap_.entrySet()) {
List<Integer> mapped = new ArrayList<>(kv.getValue().size());
for(Integer v : kv.getValue()) {
mapped.add(rMap_.get(v));
}
treeMap.put(
rMap_.get(kv.getKey()),
mapped
);
}
for (Map.Entry<Integer, Integer> kv : parentMap_.entrySet()) {
if(kv.getKey() == 0) {
parentMap.put(rMap_.get(kv.getKey()), -1);
} else {
parentMap.put(rMap_.get(kv.getKey()), rMap_.get(kv.getValue()));
}
}
}
/**
* Generates a mapping node -> neighbours(node)
*/
Map<Integer, List<Integer>> initTreeMap() {
Map<Integer, List<Integer>> treeMap = new LinkedHashMap<>(numWorkers);
for(int r = 0; r < numWorkers; r++) {
treeMap.put(r, getNeighbours(r));
}
return treeMap;
}
/**
* Generates a mapping node -> parent (parent of root is -1)
*/
Map<Integer, Integer> initParentMap() {
Map<Integer, Integer> parentMap = new LinkedHashMap<>(numWorkers);
for(int r = 0; r < numWorkers; r++) {
parentMap.put(r, ((r + 1) / 2 - 1) );
}
return parentMap;
}
/**
* Returns a list of the existing neighbours of a node; this includes at most 3 nodes: the parent, the left child and the right child.
*/
List<Integer> getNeighbours(int rank) {
if(rank < 0) {
throw new IllegalStateException("Rank should be non negative");
}
if(rank >= numWorkers) {
throw new IllegalStateException("Rank ["+rank+"] too high for the number of workers ["+numWorkers+"]");
}
rank += 1;
List<Integer> neighbour = new ArrayList<>();
if(rank > 1) {
neighbour.add(rank / 2 - 1);
}
if(rank * 2 - 1 < numWorkers) {
neighbour.add(rank * 2 - 1);
}
if(rank * 2 < numWorkers) {
neighbour.add(rank * 2);
}
return neighbour;
}
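// Worked example (illustrative): for numWorkers = 4 the 1-based complete binary tree is
//        1
//       / \
//      2   3
//     /
//    4
// so in 0-based ranks: getNeighbours(0) -> [1, 2], getNeighbours(1) -> [0, 3],
// getNeighbours(2) -> [0], getNeighbours(3) -> [1].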
/**
* Returns the DFS order (root, DFS(left_child), DFS(right_child)) starting from the node with the given rank.
*/
List<Integer> constructShareRing(Map<Integer, List<Integer>> treeMap,
Map<Integer, Integer> parentMap,
int rank) {
Set<Integer> connectionSet = new LinkedHashSet<>(treeMap.get(rank));
connectionSet.remove(parentMap.get(rank));
if(connectionSet.isEmpty()) {
return Collections.singletonList(rank);
} else {
List<Integer> ringSeq = new LinkedList<>();
ringSeq.add(rank);
int cnt = 0;
for(Integer n : connectionSet) {
List<Integer> vConnSeq = constructShareRing(treeMap, parentMap, n);
cnt++;
if(connectionSet.size() == cnt) {
Collections.reverse(vConnSeq);
}
ringSeq.addAll(vConnSeq);
}
return ringSeq;
}
}
/**
* Returns, for each node rank, the previous and next node in DFS order. For the root the "previous"
* entry is the last element, which turns the sequence into a ring structure.
*/
Map<Integer, Pair<Integer, Integer>> constructRingMap(Map<Integer, List<Integer>> treeMap,
Map<Integer, Integer> parentMap) {
assert parentMap.get(0) == -1;
List<Integer> sharedRing = constructShareRing(treeMap, parentMap, 0);
assert sharedRing.size() == treeMap.size();
Map<Integer, Pair<Integer, Integer>> ringMap = new LinkedHashMap<>(numWorkers);
for(int r = 0; r < numWorkers; r++) {
int rPrev = (r + numWorkers - 1) % numWorkers;
int rNext = (r + 1) % numWorkers;
ringMap.put(sharedRing.get(r), new Pair<>(sharedRing.get(rPrev), sharedRing.get(rNext)));
}
return ringMap;
}
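// Minimal runnable sketch (added for illustration, not part of the original source): prints the
// tree/ring wiring the tracker would hand out for a hypothetical 4-worker cluster.
public static void main(String[] args) {
LinkMap lm = new LinkMap(4);
System.out.println("tree   = " + lm.treeMap);   // rank -> neighbouring ranks
System.out.println("parent = " + lm.parentMap); // rank -> parent rank (-1 for the root)
System.out.println("ring   = " + lm.ringMap);   // rank -> (previous, next) in the DFS ring
}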
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/remote/RemoteXGBoostHandler.java
|
package hex.tree.xgboost.remote;
import hex.genmodel.utils.IOUtils;
import hex.schemas.XGBoostExecReqV3;
import hex.schemas.XGBoostExecRespV3;
import hex.tree.xgboost.EvalMetric;
import hex.tree.xgboost.exec.LocalXGBoostExecutor;
import hex.tree.xgboost.exec.XGBoostExecReq;
import org.apache.log4j.Logger;
import water.BootstrapFreezable;
import water.H2O;
import water.Iced;
import water.TypeMap;
import water.api.Handler;
import water.api.StreamingSchema;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import static hex.tree.xgboost.remote.XGBoostExecutorRegistry.*;
public class RemoteXGBoostHandler extends Handler {
private static final Logger LOG = Logger.getLogger(RemoteXGBoostHandler.class);
private XGBoostExecRespV3 makeResponse(LocalXGBoostExecutor exec) {
return new XGBoostExecRespV3(exec.modelKey);
}
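// Remote training lifecycle served by this handler (one LocalXGBoostExecutor per model key, stored
// in XGBoostExecutorRegistry): init -> setup -> update (typically once per boosting iteration) ->
// getBooster / getEvalMetric -> cleanup.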
@SuppressWarnings("unused")
public XGBoostExecRespV3 init(int ignored, XGBoostExecReqV3 req) {
XGBoostExecReq.Init init = req.readData();
LocalXGBoostExecutor exec = new LocalXGBoostExecutor(req.key.key(), init);
storeExecutor(exec);
return new XGBoostExecRespV3(exec.modelKey, collectNodes());
}
public static class RemoteExecutors extends Iced<RemoteExecutors> implements BootstrapFreezable<RemoteExecutors> {
public final String[] _nodes;
public final String[] _typeMap;
public RemoteExecutors(String[] nodes) {
_nodes = nodes;
_typeMap = TypeMap.bootstrapClasses();
}
}
private RemoteExecutors collectNodes() {
String[] nodes = new String[H2O.CLOUD.size()];
for (int i = 0; i < nodes.length; i++) {
nodes[i] = H2O.CLOUD.members()[i].getIpPortString();
}
return new RemoteExecutors(nodes);
}
@SuppressWarnings("unused")
public StreamingSchema setup(int ignored, XGBoostExecReqV3 req) {
LocalXGBoostExecutor exec = getExecutor(req);
byte[] booster = exec.setup();
return streamBytes(booster);
}
@SuppressWarnings("unused")
public XGBoostExecRespV3 update(int ignored, XGBoostExecReqV3 req) {
LocalXGBoostExecutor exec = getExecutor(req);
XGBoostExecReq.Update update = req.readData();
exec.update(update.treeId);
return makeResponse(exec);
}
@SuppressWarnings("unused")
public XGBoostExecRespV3 getEvalMetric(int ignored, XGBoostExecReqV3 req) {
LocalXGBoostExecutor exec = getExecutor(req);
EvalMetric evalMetric = exec.getEvalMetric();
return new XGBoostExecRespV3(exec.modelKey, evalMetric);
}
@SuppressWarnings("unused")
public StreamingSchema getBooster(int ignored, XGBoostExecReqV3 req) {
LocalXGBoostExecutor exec = getExecutor(req);
byte[] booster = exec.updateBooster();
return streamBytes(booster);
}
@SuppressWarnings("unused")
public XGBoostExecRespV3 cleanup(int ignored, XGBoostExecReqV3 req) {
LocalXGBoostExecutor exec = getExecutor(req);
exec.close();
removeExecutor(exec);
return makeResponse(exec);
}
private StreamingSchema streamBytes(byte[] data) {
final byte[] dataToSend;
if (data == null) dataToSend = new byte[0];
else dataToSend = data;
return new StreamingSchema((os, options) -> {
try {
IOUtils.copyStream(new ByteArrayInputStream(dataToSend), os);
} catch (IOException e) {
LOG.error("Failed writing data to response.", e);
throw new RuntimeException("Failed writing data to response.", e);
}
});
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/remote/RemoteXGBoostServletProvider.java
|
package hex.tree.xgboost.remote;
import water.server.ServletMeta;
import water.server.ServletProvider;
import java.util.Collections;
import java.util.List;
public class RemoteXGBoostServletProvider implements ServletProvider {
private static final List<ServletMeta> SERVLETS = Collections.singletonList(
new ServletMeta.Builder("/3/XGBoostExecutor.upload", RemoteXGBoostUploadServlet.class)
.withAlwaysEnabled(true) // for external XGBoost on K8s (when LeaderNodeRequestFilter is enabled)
.build()
);
@Override
public List<ServletMeta> servlets() {
return SERVLETS;
}
@Override
public int priority() {
return 0;
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/remote/RemoteXGBoostUploadServlet.java
|
package hex.tree.xgboost.remote;
import hex.genmodel.utils.IOUtils;
import hex.schemas.XGBoostExecRespV3;
import hex.tree.xgboost.matrix.RemoteMatrixLoader;
import hex.tree.xgboost.matrix.SparseMatrixDimensions;
import hex.tree.xgboost.task.XGBoostUploadMatrixTask;
import org.apache.log4j.Logger;
import water.*;
import water.server.ServletUtils;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.*;
public class RemoteXGBoostUploadServlet extends HttpServlet {
private static final Logger LOG = Logger.getLogger(RemoteXGBoostUploadServlet.class);
public static File getUploadDir(String key) {
return new File(H2O.ICE_ROOT.toString(), key);
}
public static File getCheckpointFile(String key) {
File uploadDir = getUploadDir(key);
if (uploadDir.mkdirs()) {
LOG.debug("Created temporary directory " + uploadDir);
}
return new File(getUploadDir(key), "checkpoint.bin");
}
public enum RequestType {
checkpoint,
matrixTrain,
matrixValid
}
public enum MatrixRequestType {
sparseMatrixDimensions,
sparseMatrixChunk,
denseMatrixDimensions,
denseMatrixChunk,
matrixData
}
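// Expected request shape (illustrative): POST /3/XGBoostExecutor.upload?model_key=<key>&request_type=checkpoint
// with the raw checkpoint bytes in the body, or request_type=matrixTrain|matrixValid together with
// data_type=<MatrixRequestType> and an AutoBuffer-serialized payload in the body.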
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response) {
String uri = ServletUtils.getDecodedUri(request);
try {
String modelKey = request.getParameter("model_key");
String requestType = request.getParameter("request_type");
LOG.info("Upload request for " + modelKey + " " + requestType + " received");
RequestType type = RequestType.valueOf(requestType);
if (type == RequestType.checkpoint) {
File destFile = getCheckpointFile(modelKey);
saveIntoFile(destFile, request);
} else if (type == RequestType.matrixTrain || type == RequestType.matrixValid) {
Key<?> key = Key.make(modelKey);
MatrixRequestType matrixRequestType = MatrixRequestType.valueOf(request.getParameter("data_type"));
String matrixKey = type == RequestType.matrixTrain ?
RemoteMatrixLoader.trainMatrixKey(key) : RemoteMatrixLoader.validMatrixKey(key);
handleMatrixRequest(matrixKey, matrixRequestType, request);
}
response.setContentType("application/json");
response.getWriter().write(new XGBoostExecRespV3(Key.make(modelKey)).toJsonString());
} catch (Exception e) {
ServletUtils.sendErrorResponse(response, e, uri);
} finally {
ServletUtils.logRequest("POST", request, response);
}
}
private void handleMatrixRequest(String matrixKey, MatrixRequestType type, HttpServletRequest request) throws IOException {
BootstrapFreezable<?> requestData;
try (AutoBuffer ab = new AutoBuffer(request.getInputStream(), TypeMap.bootstrapClasses())) {
requestData = ab.get();
}
switch (type) {
case sparseMatrixDimensions:
RemoteMatrixLoader.initSparse(matrixKey, (SparseMatrixDimensions) requestData);
break;
case sparseMatrixChunk:
RemoteMatrixLoader.sparseChunk(matrixKey, (XGBoostUploadMatrixTask.SparseMatrixChunk) requestData);
break;
case denseMatrixDimensions:
RemoteMatrixLoader.initDense(matrixKey, (XGBoostUploadMatrixTask.DenseMatrixDimensions) requestData);
break;
case denseMatrixChunk:
RemoteMatrixLoader.denseChunk(matrixKey, (XGBoostUploadMatrixTask.DenseMatrixChunk) requestData);
break;
case matrixData:
RemoteMatrixLoader.matrixData(matrixKey, (XGBoostUploadMatrixTask.MatrixData) requestData);
break;
default:
throw new IllegalArgumentException("Unexpected request type: " + type);
}
}
private void saveIntoFile(File destFile, HttpServletRequest request) throws IOException {
LOG.debug("Saving contents into " + destFile);
InputStream is = request.getInputStream();
try (FileOutputStream fos = new FileOutputStream(destFile)) {
IOUtils.copyStream(is, fos);
}
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/remote/SteamExecutorStarter.java
|
package hex.tree.xgboost.remote;
import hex.steam.SteamMessageSender;
import hex.steam.SteamMessenger;
import hex.tree.xgboost.XGBoostModel;
import hex.tree.xgboost.exec.RemoteXGBoostExecutor;
import org.apache.log4j.Logger;
import water.H2O;
import water.Job;
import water.Key;
import water.fvec.Frame;
import java.io.IOException;
import java.util.*;
public class SteamExecutorStarter implements SteamMessenger {
private static final Logger LOG = Logger.getLogger(SteamExecutorStarter.class);
/**
* Initialized by Service lookup
*/
private static SteamExecutorStarter instance;
public static SteamExecutorStarter getInstance() {
return instance;
}
private static class ClusterInfo {
final String uri;
final String userName;
final String password;
private ClusterInfo(String uri, String userName, String password) {
this.uri = uri;
this.userName = userName;
this.password = password;
}
}
private final Object sendingLock = new Object[0];
private final Object clusterLock = new Object[0];
private SteamMessageSender sender;
private ClusterInfo cluster;
private final Deque<Map<String, String>> receivedMessages = new LinkedList<>();
public SteamExecutorStarter() {
instance = this;
}
public RemoteXGBoostExecutor getRemoteExecutor(XGBoostModel model, Frame train, Frame valid, Job<XGBoostModel> job) throws IOException {
ClusterInfo clusterInfo = ensureClusterStarted(model._key, job);
return makeExecutor(model, train, valid, clusterInfo);
}
public void startCluster(Key<XGBoostModel> key, Job<XGBoostModel> job) throws IOException {
ensureClusterStarted(key, job);
}
private ClusterInfo ensureClusterStarted(Key<XGBoostModel> key, Job<XGBoostModel> job) throws IOException {
synchronized (clusterLock) {
if (cluster == null) {
LOG.info("Starting external cluster for model " + key + ".");
startCluster(job);
} else {
LOG.info("External cluster available, starting model " + key + " now.");
}
return cluster;
}
}
private void startCluster(Job<XGBoostModel> job) throws IOException {
clearMessages();
Map<String, String> startRequest = makeStartRequest();
sendMessage(startRequest);
while (!job.stop_requested()) {
Map<String, String> response = waitForMessage();
if (response != null) {
if ("started".equals(response.get("status"))) {
String remoteUri = response.get("uri");
String userName = response.get("user");
String password = response.get("password");
cluster = new ClusterInfo(remoteUri, userName, password);
LOG.info("External cluster started at " + remoteUri + ".");
break;
} else if ("starting".equals(response.get("status"))) {
LOG.info("Continuing to wait for external cluster to start.");
} else if ("failed".equals(response.get("status"))) {
throw new IllegalStateException("Failed to start external cluster: " + response.get("reason"));
} else {
throw new IllegalStateException(
"Unknown status received from steam: " + response.get("status") + ", reason:" + response.get("reason")
);
}
} else {
throw new IllegalStateException("No response received from Steam.");
}
}
}
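// Shape of the notification consumed above (illustrative; TYPE and ID refer to the SteamMessenger
// constants, the remaining keys are the ones read in startCluster()):
//   { TYPE: "xgboostClusterStartNotification", ID: "...",
//     "status": "started" | "starting" | "failed",
//     "uri": "...", "user": "...", "password": "...", "reason": "..." }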
private static RemoteXGBoostExecutor makeExecutor(XGBoostModel model, Frame train, Frame valid, ClusterInfo cluster) {
return new RemoteXGBoostExecutor(model, train, valid, cluster.uri, cluster.userName, cluster.password);
}
private void clearMessages() {
synchronized (receivedMessages) {
receivedMessages.clear();
}
}
private Map<String, String> waitForMessage() {
int timeout = Integer.parseInt(H2O.getSysProperty("steam.notification.timeout", "20000"));
synchronized (receivedMessages) {
if (!receivedMessages.isEmpty()) {
return receivedMessages.pop();
}
try {
receivedMessages.wait(timeout);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
if (!receivedMessages.isEmpty()) {
return receivedMessages.pop();
} else {
return null;
}
}
}
@Override
public void onConnectionStateChange(SteamMessageSender sender) {
synchronized (sendingLock) {
this.sender = sender;
}
}
private void sendMessage(Map<String, String> message) throws IOException {
synchronized (sendingLock) {
if (this.sender != null) {
sender.sendMessage(message);
} else {
throw new IOException("Steam communication chanel is not open.");
}
}
}
@Override
public void onMessage(Map<String, String> message) {
if ("stopXGBoostClusterNotification".equals(message.get(TYPE))) {
handleStopRequest(message);
} else if ("xgboostClusterStartNotification".equals(message.get(TYPE))) {
queueResponse(message);
} else {
LOG.debug("Ignoring message " + message.get(ID) + " " + message.get(TYPE));
}
}
private void queueResponse(Map<String, String> message) {
synchronized (receivedMessages) {
LOG.info("Received message response " + message.get(ID));
receivedMessages.add(message);
receivedMessages.notifyAll();
}
}
private void handleStopRequest(Map<String, String> message) {
LOG.info("Received stop request " + message.get(ID));
boolean xgBoostInProgress = isXGBoostInProgress();
if (xgBoostInProgress) {
LOG.info("Responding to stop request with allowed=false");
sendStopResponse(message, false);
} else {
synchronized (clusterLock) {
LOG.info("Responding to stop request with allowed=true");
sendStopResponse(message, true);
cluster = null;
}
}
}
private void sendStopResponse(Map<String, String> request, boolean allow) {
try {
sendMessage(makeStopConfirmation(request, allow));
} catch (IOException e) {
LOG.error("Failed to send stop cluster response.", e);
}
}
private boolean isXGBoostInProgress() {
return Arrays.stream(Job.jobs())
.anyMatch(job -> job.isRunning() && job._result.get() instanceof XGBoostModel);
}
private Map<String, String> makeStartRequest() {
Map<String, String> req = new HashMap<>();
req.put(TYPE, "startXGBoostCluster");
req.put(ID, H2O.SELF.getIpPortString() + "_startXGBoost");
return req;
}
private Map<String, String> makeStopConfirmation(Map<String, String> message, boolean allow) {
Map<String, String> req = new HashMap<>();
req.put(TYPE, "stopXGBoostClusterConfirmation");
req.put(ID, message.get(ID) + "_response");
req.put("allowed", Boolean.toString(allow));
return req;
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/remote/XGBoostExecutorRegistry.java
|
package hex.tree.xgboost.remote;
import hex.schemas.XGBoostExecReqV3;
import hex.tree.xgboost.exec.LocalXGBoostExecutor;
import water.Key;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
public class XGBoostExecutorRegistry {
private static final Map<Key, LocalXGBoostExecutor> REGISTRY = new ConcurrentHashMap<>();
public static LocalXGBoostExecutor getExecutor(XGBoostExecReqV3 req) {
return REGISTRY.get(req.key.key());
}
public static void storeExecutor(LocalXGBoostExecutor exec) {
REGISTRY.put(exec.modelKey, exec);
}
public static void removeExecutor(LocalXGBoostExecutor exec) {
REGISTRY.remove(exec.modelKey);
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/task/AbstractXGBoostTask.java
|
package hex.tree.xgboost.task;
import hex.tree.xgboost.XGBoostExtension;
import water.*;
public abstract class AbstractXGBoostTask<T extends MRTask<T>> extends MRTask<T> {
final Key _modelKey;
private final boolean[] _hasDMatrix;
AbstractXGBoostTask(AbstractXGBoostTask<?> setupTask) {
this(setupTask._modelKey, setupTask._hasDMatrix);
}
AbstractXGBoostTask(Key modelKey, boolean[] hasDMatrix) {
_modelKey = modelKey;
_hasDMatrix = hasDMatrix;
}
@Override
protected final void setupLocal() {
assert _fr == null : "MRTask invoked on a Frame with no intention to run map() on Chunks might not invoke reduce(); " +
"use doAllNodes() to make sure reduce() will be called.";
if (H2O.ARGS.client) {
return;
}
if (!_hasDMatrix[H2O.SELF.index()])
return;
// Verify that XGBoost is available on this node before doing any work
if (!ExtensionManager.getInstance().isCoreExtensionEnabled(XGBoostExtension.NAME)) {
throw new IllegalStateException("XGBoost is not available on the node " + H2O.SELF);
}
// Do the work
execute();
}
abstract void execute();
/**
* Alias to doAllNodes() - the XGBoost task will actually run only on selected nodes. We use doAllNodes() to
* make sure the reduce() operations defined on the tasks will be invoked even if there was no work done on the node
* from MRTask's point of view.
*/
public T run() {
return doAllNodes();
}
H2ONode getBoosterNode() {
for (int i = 0; i < H2O.CLOUD.size(); i++) {
if (_hasDMatrix[i])
return H2O.CLOUD._memary[i];
}
throw new IllegalStateException("No node of the cluster is holding a Booster");
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/task/XGBoostCleanupTask.java
|
package hex.tree.xgboost.task;
import hex.tree.xgboost.matrix.RemoteMatrixLoader;
/**
* Cleans up after XGBoost training
*/
public class XGBoostCleanupTask extends AbstractXGBoostTask<XGBoostCleanupTask> {
private XGBoostCleanupTask(XGBoostSetupTask setupTask) {
super(setupTask);
}
@Override
protected void execute() {
XGBoostUpdater.terminate(_modelKey);
RemoteMatrixLoader.cleanup(_modelKey.toString());
}
public static void cleanUp(XGBoostSetupTask setupTask) {
new XGBoostCleanupTask(setupTask).doAllNodes();
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/task/XGBoostScoreTask.java
|
package hex.tree.xgboost.task;
import hex.*;
import hex.tree.xgboost.XGBoostModel;
import hex.tree.xgboost.XGBoostOutput;
import hex.tree.xgboost.predict.XGBoostBigScorePredict;
import hex.tree.xgboost.predict.XGBoostPredict;
import water.MRTask;
import water.MemoryManager;
import water.fvec.Chunk;
import water.fvec.NewChunk;
import water.udf.CFuncRef;
public class XGBoostScoreTask extends CMetricScoringTask<XGBoostScoreTask> { // used to score model metrics
private final XGBoostOutput _output;
private final int _weightsChunkId;
private final XGBoostModel _model;
private final boolean _isTrain;
private final double _threshold;
public ModelMetrics.MetricBuilder _metricBuilder;
private transient XGBoostBigScorePredict _predict;
public XGBoostScoreTask(
final XGBoostOutput output,
final int weightsChunkId,
final boolean isTrain,
final XGBoostModel model,
CFuncRef customMetricFunc
) {
super(customMetricFunc);
_output = output;
_weightsChunkId = weightsChunkId;
_model = model;
_isTrain = isTrain;
_threshold = model.defaultThreshold();
}
/**
* Constructs a MetricBuilder for this XGBoostScoreTask based on parameters of response variable
*
* @param responseClassesNum Number of classes found in response variable
* @param responseDomain Specific domains in response variable
* @return An instance of {@link hex.ModelMetrics.MetricBuilder} corresponding to given response variable type
*/
private ModelMetrics.MetricBuilder createMetricsBuilder(final int responseClassesNum, final String[] responseDomain) {
switch (responseClassesNum) {
case 1:
return new ModelMetricsRegression.MetricBuilderRegression();
case 2:
return new ModelMetricsBinomial.MetricBuilderBinomial(responseDomain);
default:
return new ModelMetricsMultinomial.MetricBuilderMultinomial(responseClassesNum, responseDomain, this._model._parms._auc_type);
}
}
@Override
protected void setupLocal() {
super.setupLocal();
_predict = _model.setupBigScorePredict(_isTrain);
}
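// map() below writes the scored output into ncs: regression -> [prediction],
// binomial -> [predicted label, p(class 0), p(class 1)], multinomial -> [predicted label,
// p(class 0), ..., p(class N-1)], and at the same time feeds every row into the metric builder.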
@Override
public void map(Chunk[] cs, NewChunk[] ncs) {
_metricBuilder = createMetricsBuilder(_output.nclasses(), _output.classNames());
final XGBoostPredict predictor = _predict.initMap(_fr, cs);
final float[][] preds = predictor.predict(cs);
if (preds.length == 0) return;
assert preds.length == cs[0]._len;
final Chunk responseChunk = cs[_output.responseIdx()];
if (_output.nclasses() == 1) {
double[] currentPred = new double[1];
float[] yact = new float[1];
for (int j = 0; j < preds.length; ++j) {
currentPred[0] = preds[j][0];
yact[0] = (float) responseChunk.atd(j);
double weight = _weightsChunkId != -1 ? cs[_weightsChunkId].atd(j) : 1; // If there is no chunk with weights, the weight is considered to be 1
_metricBuilder.perRow(currentPred, yact, weight, 0, _model);
customMetricPerRow(currentPred, yact, weight, 0, _model);
}
for (int i = 0; i < cs[0]._len; ++i) {
ncs[0].addNum(preds[i][0]);
}
} else if (_output.nclasses() == 2) {
double[] row = new double[3];
float[] yact = new float[1];
for (int i = 0; i < cs[0]._len; ++i) {
final double p = preds[i][0];
row[1] = 1 - p;
row[2] = p;
row[0] = hex.genmodel.GenModel.getPrediction(row, _output._priorClassDist, null, _threshold);
ncs[0].addNum(row[0]);
ncs[1].addNum(row[1]);
ncs[2].addNum(row[2]);
double weight = _weightsChunkId != -1 ? cs[_weightsChunkId].atd(i) : 1; // If there is no chunk with weights, the weight is considered to be 1
yact[0] = (float) responseChunk.atd(i);
_metricBuilder.perRow(row, yact, weight, 0, _model);
customMetricPerRow(row, yact, weight, 0, _model);
}
} else {
float[] yact = new float[1];
double[] row = MemoryManager.malloc8d(ncs.length);
for (int i = 0; i < cs[0]._len; ++i) {
for (int j = 1; j < row.length; ++j) {
double val = preds[i][j - 1];
ncs[j].addNum(val);
row[j] = val;
}
row[0] = hex.genmodel.GenModel.getPrediction(row, _output._priorClassDist, null, _threshold);
ncs[0].addNum(row[0]);
yact[0] = (float) responseChunk.atd(i);
double weight = _weightsChunkId != -1 ? cs[_weightsChunkId].atd(i) : 1; // If there is no chunk with weights, the weight is considered to be 1
_metricBuilder.perRow(row, yact, weight, 0, _model);
customMetricPerRow(row, yact, weight, 0, _model);
}
}
}
@Override
public void reduce(XGBoostScoreTask mrt) {
super.reduce(mrt);
_metricBuilder.reduce(mrt._metricBuilder);
}
@Override protected void postGlobal() {
super.postGlobal();
if(_metricBuilder != null) {
_metricBuilder.postGlobal(getComputedCustomMetric());
if (null != cFuncRef)
_metricBuilder._CMetricScoringTask = this;
}
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/task/XGBoostSetupTask.java
|
package hex.tree.xgboost.task;
import hex.tree.xgboost.BoosterParms;
import hex.tree.xgboost.matrix.MatrixLoader;
import ai.h2o.xgboost4j.java.DMatrix;
import ai.h2o.xgboost4j.java.XGBoostError;
import org.apache.log4j.Logger;
import water.H2O;
import water.Key;
import water.fvec.Frame;
import water.fvec.Vec;
import water.util.IcedHashMapGeneric;
import java.io.File;
import java.util.Map;
/**
* Initializes XGBoost training (converts the Frame to a set of node-local DMatrices)
*/
public class XGBoostSetupTask extends AbstractXGBoostTask<XGBoostSetupTask> {
private static final Logger LOG = Logger.getLogger(XGBoostSetupTask.class);
private final BoosterParms _boosterParms;
private final byte[] _checkpoint;
private final IcedHashMapGeneric.IcedHashMapStringString _rabitEnv;
private final MatrixLoader _matrixLoader;
private final String _saveMatrixDirectory;
public XGBoostSetupTask(
Key modelKey, String saveMatrixDirectory, BoosterParms boosterParms,
byte[] checkpointToResume, Map<String, String> rabitEnv, boolean[] nodes,
MatrixLoader matrixLoader
) {
super(modelKey, nodes);
_boosterParms = boosterParms;
_checkpoint = checkpointToResume;
_matrixLoader = matrixLoader;
_saveMatrixDirectory = saveMatrixDirectory;
(_rabitEnv = new IcedHashMapGeneric.IcedHashMapStringString()).putAll(rabitEnv);
}
@Override
protected void execute() {
DMatrix trainMatrix, validMatrix = null;
try {
trainMatrix = _matrixLoader.makeLocalTrainMatrix().get();
} catch (XGBoostError e) {
throw new IllegalStateException("Failed to create XGBoost DMatrix for training dataset", e);
}
if (_matrixLoader.hasValidationFrame()) {
try {
validMatrix = _matrixLoader.makeLocalValidMatrix().get();
} catch (XGBoostError e) {
throw new IllegalStateException("Failed to create XGBoost DMatrix for validation dataset", e);
}
}
if (_saveMatrixDirectory != null) {
File directory = new File(_saveMatrixDirectory);
if (directory.mkdirs()) {
LOG.debug("Created directory for matrix export: " + directory.getAbsolutePath());
}
File trainPath = new File(directory, "train_matrix.part" + H2O.SELF.index());
LOG.info("Saving node-local portion of XGBoost training dataset to " + trainPath.getAbsolutePath() + ".");
trainMatrix.saveBinary(trainPath.getAbsolutePath());
if (validMatrix != null) {
File validPath = new File(directory, "valid_matrix.part" + H2O.SELF.index());
LOG.info("Saving node-local portion of XGBoost validation dataset to " + validPath.getAbsolutePath() + ".");
validMatrix.saveBinary(validPath.getAbsolutePath());
}
}
_rabitEnv.put("DMLC_TASK_ID", String.valueOf(H2O.SELF.index()));
XGBoostUpdater thread = XGBoostUpdater.make(_modelKey, trainMatrix, validMatrix, _boosterParms, _checkpoint, _rabitEnv);
thread.start(); // we do not need to wait for the Updater to init Rabit - subsequent tasks will wait
}
/**
* Finds which nodes actually hold some data of the given Frame
* @param fr frame to find nodes for
* @return FrameNodes
*/
public static FrameNodes findFrameNodes(Frame fr) {
// Count on how many nodes the data resides
boolean[] nodesHoldingFrame = new boolean[H2O.CLOUD.size()];
Vec vec = fr.anyVec();
for(int chunkNr = 0; chunkNr < vec.nChunks(); chunkNr++) {
int home = vec.chunkKey(chunkNr).home_node().index();
if (! nodesHoldingFrame[home])
nodesHoldingFrame[home] = true;
}
return new FrameNodes(fr, nodesHoldingFrame);
}
public static class FrameNodes {
public final Frame _fr;
public final boolean[] _nodes;
public final int _numNodes;
private FrameNodes(Frame fr, boolean[] nodes) {
_fr = fr;
_nodes = nodes;
int n = 0;
for (boolean f : _nodes)
if (f) n++;
_numNodes = n;
}
public int getNumNodes() { return _numNodes; }
public boolean isSubsetOf(FrameNodes otherNodes) {
for (int i = 0; i < _nodes.length; i++) {
if (_nodes[i] && !otherNodes._nodes[i]) {
return false;
}
}
return true;
}
}
}
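/*
 * Illustrative sketch only (not part of the original source): shows how the public helper
 * findFrameNodes could be used to check whether a validation frame lives on a subset of the
 * nodes holding the training frame. The frame variables are hypothetical.
 */
class FrameNodesUsageSketch {
  static boolean validationCoveredByTraining(Frame train, Frame valid) {
    XGBoostSetupTask.FrameNodes trainNodes = XGBoostSetupTask.findFrameNodes(train);
    XGBoostSetupTask.FrameNodes validNodes = XGBoostSetupTask.findFrameNodes(valid);
    // training runs only on nodes that actually hold training chunks
    return validNodes.isSubsetOf(trainNodes);
  }
}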
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/task/XGBoostUpdateTask.java
|
package hex.tree.xgboost.task;
import ai.h2o.xgboost4j.java.Booster;
import hex.tree.xgboost.EvalMetric;
import org.apache.log4j.Logger;
import water.*;
public class XGBoostUpdateTask extends AbstractXGBoostTask<XGBoostUpdateTask> {
private static final Logger LOG = Logger.getLogger(XGBoostUpdateTask.class);
private final int _tid;
public XGBoostUpdateTask(XGBoostSetupTask setupTask, int tid) {
super(setupTask);
_tid = tid;
}
@Override
protected void execute() {
Booster booster = XGBoostUpdater.getUpdater(_modelKey).doUpdate(_tid);
if (booster == null)
throw new IllegalStateException("Boosting iteration didn't produce a valid Booster.");
}
public byte[] getBoosterBytes() {
final H2ONode boosterNode = getBoosterNode();
final byte[] boosterBytes;
if (H2O.SELF.equals(boosterNode)) {
boosterBytes = XGBoostUpdater.getUpdater(_modelKey).getBoosterBytes();
} else {
LOG.debug("Booster will be retrieved from a remote node, node=" + boosterNode);
FetchBoosterTask t = new FetchBoosterTask(_modelKey);
boosterBytes = new RPC<>(boosterNode, t).call().get()._boosterBytes;
}
return boosterBytes;
}
public EvalMetric getEvalMetric() {
final H2ONode boosterNode = getBoosterNode();
final EvalMetric evalMetric;
if (H2O.SELF.equals(boosterNode)) {
evalMetric = XGBoostUpdater.getUpdater(_modelKey).getEvalMetric();
} else {
LOG.debug("CustomMetric will be retrieved from a remote node, node=" + boosterNode);
FetchEvalMetricTask t = new FetchEvalMetricTask(_modelKey);
evalMetric = new RPC<>(boosterNode, t).call().get()._evalMetric;
}
return evalMetric;
}
private static class FetchBoosterTask extends DTask<FetchBoosterTask> {
private final Key _modelKey;
// OUT
private byte[] _boosterBytes;
private FetchBoosterTask(Key modelKey) {
_modelKey = modelKey;
}
@Override
public void compute2() {
_boosterBytes = XGBoostUpdater.getUpdater(_modelKey).getBoosterBytes();
tryComplete();
}
}
private static class FetchEvalMetricTask extends DTask<FetchEvalMetricTask> {
private final Key _modelKey;
// OUT
private EvalMetric _evalMetric;
private FetchEvalMetricTask(Key modelKey) {
_modelKey = modelKey;
}
@Override
public void compute2() {
_evalMetric = XGBoostUpdater.getUpdater(_modelKey).getEvalMetric();
tryComplete();
}
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/task/XGBoostUpdater.java
|
package hex.tree.xgboost.task;
import hex.tree.xgboost.BoosterParms;
import ai.h2o.xgboost4j.java.*;
import hex.tree.xgboost.EvalMetric;
import org.apache.log4j.Logger;
import water.H2O;
import water.Key;
import water.nbhm.NonBlockingHashMap;
import water.util.Log;
import java.util.Map;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.TimeUnit;
public class XGBoostUpdater extends Thread {
private static final Logger LOG = Logger.getLogger(XGBoostUpdater.class);
private static final long WORK_START_TIMEOUT_SECS = 5 * 60; // Each Booster task should start before this timer expires
private static final long INACTIVE_CHECK_INTERVAL_SECS = 60;
private static final NonBlockingHashMap<Key, XGBoostUpdater> updaters = new NonBlockingHashMap<>();
private final Key _modelKey;
private final DMatrix _trainMat;
private final DMatrix _validMat;
private final BoosterParms _boosterParms;
private final String _evalMetricSpec;
private final byte[] _checkpointBoosterBytes;
private final Map<String, String> _rabitEnv;
private volatile SynchronousQueue<BoosterCallable<?>> _in;
private volatile SynchronousQueue<Object> _out;
private BoosterWrapper _booster;
private volatile EvalMetric _evalMetric;
private XGBoostUpdater(
Key modelKey, DMatrix trainMat, DMatrix validMat, BoosterParms boosterParms,
byte[] checkpointBoosterBytes, Map<String, String> rabitEnv
) {
super("XGBoostUpdater-" + modelKey);
_modelKey = modelKey;
_trainMat = trainMat;
_validMat = validMat;
_boosterParms = boosterParms;
_checkpointBoosterBytes = checkpointBoosterBytes;
_rabitEnv = rabitEnv;
_evalMetricSpec = (String) _boosterParms.get().get("eval_metric");
_in = new SynchronousQueue<>();
_out = new SynchronousQueue<>();
}
@Override
public void run() {
try {
Rabit.init(_rabitEnv);
while (! interrupted()) {
BoosterCallable<?> task = _in.take();
Object result = task.call();
_out.put(result);
}
} catch (InterruptedException e) {
XGBoostUpdater self = updaters.get(_modelKey);
if (self != null) {
LOG.error("Updater thread was interrupted while it was still registered, name=" + self.getName(), e);
} else {
LOG.debug("Updater thread interrupted.", e);
}
Thread.currentThread().interrupt();
} catch (XGBoostError e) {
LOG.error("XGBoost training iteration failed", e);
} finally {
_in = null; // Will throw NPE if used wrong
_out = null;
updaters.remove(_modelKey);
try {
_trainMat.dispose();
if (_validMat != null) {
_validMat.dispose();
}
if (_booster != null)
_booster.dispose();
} catch (Exception e) {
LOG.warn("Failed to dispose of training matrix/booster", e);
}
try {
Rabit.shutdown();
} catch (Exception xgBoostError) {
LOG.warn("Rabit shutdown during update failed", xgBoostError);
}
}
}
@SuppressWarnings("unchecked")
private <T> T invoke(BoosterCallable<T> callable) throws InterruptedException {
final SynchronousQueue<BoosterCallable<?>> inQ = _in;
if (inQ == null)
throw new IllegalStateException("Updater is inactive on node " + H2O.SELF);
if (! inQ.offer(callable, WORK_START_TIMEOUT_SECS, TimeUnit.SECONDS))
throw new IllegalStateException("XGBoostUpdater couldn't start work on task " + callable + " in " + WORK_START_TIMEOUT_SECS + "s.");
SynchronousQueue<?> outQ;
int i = 0;
while ((outQ = _out) != null) {
i++;
T result = (T) outQ.poll(INACTIVE_CHECK_INTERVAL_SECS, TimeUnit.SECONDS);
if (result != null) {
return result;
} else if (i > 5) {
LOG.warn(String.format("XGBoost task of type '%s' is taking unexpectedly long, it didn't finish in %d seconds.",
callable, INACTIVE_CHECK_INTERVAL_SECS * i));
}
}
throw new IllegalStateException("Cannot perform booster operation: updater is inactive on node " + H2O.SELF);
}
private class UpdateBooster implements BoosterCallable<Booster> {
private final int _tid;
private UpdateBooster(int tid) { _tid = tid; }
@Override
public Booster call() throws XGBoostError {
if ((_booster == null) && _tid == 0) {
_booster = new BoosterWrapper(_checkpointBoosterBytes, _boosterParms.get(), _trainMat, _validMat);
_evalMetric = computeEvalMetric();
// Force Booster initialization; we can call any method that does "lazy init"
byte[] boosterBytes = _booster.toByteArray();
LOG.info("Initial Booster created, size=" + boosterBytes.length);
} else {
// Do one iteration
assert _booster != null;
_booster.update(_trainMat, _tid);
_evalMetric = computeEvalMetric();
_booster.saveRabitCheckpoint();
}
return _booster.getBooster();
}
private EvalMetric computeEvalMetric() throws XGBoostError {
if (_evalMetricSpec == null) {
return null;
}
final String evalMetricVal = _booster.evalSet(_trainMat, _validMat, _tid);
return parseEvalMetric(evalMetricVal);
}
@Override
public String toString() {
return "Boosting Iteration (tid=" + _tid + ")";
}
}
private EvalMetric parseEvalMetric(String evalMetricVal) {
return parseEvalMetric(_evalMetricSpec, _validMat != null, evalMetricVal);
}
static EvalMetric parseEvalMetric(String evalMetricSpec, boolean hasValid, String evalMetricVal) {
final String[] parts = evalMetricVal.split("\t");
final int expectedParts = hasValid ? 3 : 2;
if (parts.length != expectedParts) {
Log.err("Evaluation metric cannot be parsed, unexpected number of elements. Value: '" + evalMetricSpec + "'.");
return EvalMetric.empty(evalMetricSpec);
}
double trainVal, validVal = Double.NaN;
trainVal = parseEvalMetricPart(parts[1]);
if (hasValid) {
validVal = parseEvalMetricPart(parts[2]);
}
return new EvalMetric(evalMetricSpec, trainVal, validVal);
}
static double parseEvalMetricPart(String evalMetricVal) {
final int sepPos = evalMetricVal.lastIndexOf(":");
if (sepPos >= 0) {
String valStr = evalMetricVal.substring(sepPos + 1).trim();
try {
return Double.parseDouble(valStr);
} catch (Exception e) {
Log.err("Failed to parse value of evaluation metric: '" + evalMetricVal + "'.", e);
}
}
return Double.NaN;
}
private class SerializeBooster implements BoosterCallable<byte[]> {
@Override
public byte[] call() throws XGBoostError {
return _booster.toByteArray();
}
@Override
public String toString() {
return "SerializeBooster";
}
}
byte[] getBoosterBytes() {
try {
return invoke(new SerializeBooster());
} catch (InterruptedException e) {
throw new IllegalStateException("Failed to serialize Booster - operation was interrupted", e);
}
}
EvalMetric getEvalMetric() {
return _evalMetric;
}
Booster doUpdate(int tid) {
try {
return invoke(new UpdateBooster(tid));
} catch (InterruptedException e) {
throw new IllegalStateException("Boosting iteration failed - operation was interrupted", e);
}
}
static XGBoostUpdater make(Key modelKey, DMatrix trainMat, DMatrix validMat, BoosterParms boosterParms,
byte[] checkpoint, Map<String, String> rabitEnv) {
XGBoostUpdater updater = new XGBoostUpdater(modelKey, trainMat, validMat, boosterParms, checkpoint, rabitEnv);
updater.setUncaughtExceptionHandler(LoggingExceptionHandler.INSTANCE);
if (updaters.putIfAbsent(modelKey, updater) != null)
throw new IllegalStateException("XGBoostUpdater for modelKey=" + modelKey + " already exists!");
return updater;
}
static void terminate(Key modelKey) {
XGBoostUpdater updater = updaters.remove(modelKey);
if (updater == null)
LOG.debug("XGBoostUpdater for modelKey=" + modelKey + " was already clean-up on node " + H2O.SELF);
else
updater.interrupt();
}
static XGBoostUpdater getUpdater(Key modelKey) {
XGBoostUpdater updater = updaters.get(modelKey);
if (updater == null) {
throw new IllegalStateException("XGBoostUpdater for modelKey=" + modelKey + " was not found!");
}
return updater;
}
private interface BoosterCallable<E> {
E call() throws XGBoostError;
}
private static class LoggingExceptionHandler implements UncaughtExceptionHandler {
private static LoggingExceptionHandler INSTANCE = new LoggingExceptionHandler();
@Override
public void uncaughtException(Thread t, Throwable e) {
LOG.error("Uncaught exception in " + t.getName(), e);
}
}
}
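/*
 * Illustrative sketch only (not part of the original source): demonstrates how the package-private
 * parseEvalMetric helper interprets an XGBoost evaluation string. The sample line below is an
 * assumption about the "[iter]\ttrain-metric:x\tvalid-metric:y" layout produced by Booster.evalSet;
 * real output may differ.
 */
class EvalMetricParsingSketch {
  static EvalMetric parseSample() {
    String evalLine = "[3]\ttrain-rmse:0.218\tvalid-rmse:0.356"; // hypothetical sample
    // the spec "rmse" and hasValid=true are illustrative arguments only
    return XGBoostUpdater.parseEvalMetric("rmse", true, evalLine);
  }
}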
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/task/XGBoostUploadMatrixTask.java
|
package hex.tree.xgboost.task;
import hex.DataInfo;
import hex.tree.xgboost.XGBoostModel;
import hex.tree.xgboost.XGBoostModelInfo;
import hex.tree.xgboost.exec.XGBoostHttpClient;
import hex.tree.xgboost.matrix.SparseMatrixDimensions;
import hex.tree.xgboost.remote.RemoteXGBoostUploadServlet;
import water.BootstrapFreezable;
import org.apache.log4j.Logger;
import water.H2O;
import water.Iced;
import water.LocalMR;
import water.MrFun;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.Vec;
import water.util.ArrayUtils;
import water.util.VecUtils;
import java.util.Optional;
import static hex.tree.xgboost.XGBoostUtils.sumChunksLength;
import static hex.tree.xgboost.matrix.MatrixFactoryUtils.setResponseWeightAndOffset;
import static hex.tree.xgboost.matrix.SparseMatrixFactory.calculateCSRMatrixDimensions;
import static water.MemoryManager.malloc4f;
import static hex.tree.xgboost.remote.RemoteXGBoostUploadServlet.MatrixRequestType.*;
public class XGBoostUploadMatrixTask extends AbstractXGBoostTask<XGBoostUploadMatrixTask> {
private static final Logger LOG = Logger.getLogger(XGBoostUploadMatrixTask.class);
private final String[] remoteNodes;
private final boolean https;
private final String contextPath;
private final String userName;
private final String password;
private final Frame frame;
private final boolean isTrain;
private final XGBoostModelInfo modelInfo;
private final XGBoostModel.XGBoostParameters parms;
private final boolean sparse;
public XGBoostUploadMatrixTask(
XGBoostModel model, Frame frame, boolean isTrain, boolean[] frameNodes, String[] remoteNodes,
boolean https, String contextPath, String userName, String password
) {
super(model._key, frameNodes);
this.remoteNodes = remoteNodes;
this.https = https;
this.contextPath = contextPath;
this.userName = userName;
this.password = password;
this.modelInfo = model.model_info();
this.parms = model._parms;
this.sparse = model._output._sparse;
this.frame = frame;
this.isTrain = isTrain;
}
private XGBoostHttpClient makeClient() {
String remoteUri = remoteNodes[H2O.SELF.index()] + contextPath;
return new XGBoostHttpClient(remoteUri, https, userName, password);
}
@Override
protected void execute() {
XGBoostHttpClient client = makeClient();
LOG.info("Starting matrix upload for " + _modelKey);
long start = System.currentTimeMillis();
assert modelInfo.dataInfo() != null;
int[] chunks = VecUtils.getLocalChunkIds(frame.anyVec());
final Vec responseVec = frame.vec(parms._response_column);
final Vec weightVec = frame.vec(parms._weights_column);
final Vec offsetsVec = frame.vec(parms._offset_column);
final int[] nRowsByChunk = new int[chunks.length];
final long nRowsL = sumChunksLength(chunks, responseVec, Optional.ofNullable(weightVec), nRowsByChunk);
if (nRowsL > Integer.MAX_VALUE) {
throw new IllegalArgumentException("XGBoost currently doesn't support datasets with more than " +
Integer.MAX_VALUE + " per node. " +
"To train a XGBoost model on this dataset add more nodes to your H2O cluster and use distributed training.");
}
final int nRows = (int) nRowsL;
MatrixData matrixData = new MatrixData(nRows, weightVec, offsetsVec);
if (sparse) {
LOG.debug("Treating matrix as sparse.");
matrixData.shape = modelInfo.dataInfo().fullN();
matrixData.actualRows = csr(
client, chunks, weightVec, offsetsVec, responseVec, modelInfo.dataInfo(),
matrixData.resp, matrixData.weights, matrixData.offsets
);
} else {
LOG.debug("Treating matrix as dense.");
matrixData.actualRows = dense(
client, chunks, nRows, nRowsByChunk, weightVec, offsetsVec, responseVec, modelInfo.dataInfo(),
matrixData.resp, matrixData.weights, matrixData.offsets
);
}
client.uploadMatrixData(_modelKey, RemoteXGBoostUploadServlet.MatrixRequestType.matrixData, isTrain, matrixData);
LOG.debug("Matrix upload finished in " + ((System.currentTimeMillis() - start) / 1000d));
}
public static class MatrixData extends Iced<MatrixData> implements BootstrapFreezable<MatrixData> {
public final float[] resp;
public final float[] weights;
public final float[] offsets;
public int actualRows;
public int shape;
MatrixData(int nRows, Vec weightVec, Vec offsetsVec) {
resp = malloc4f(nRows);
if (weightVec != null) {
weights = malloc4f(nRows);
} else {
weights = null;
}
if (offsetsVec != null) {
offsets = malloc4f(nRows);
} else {
offsets = null;
}
}
}
public static class DenseMatrixDimensions extends Iced<DenseMatrixDimensions> implements BootstrapFreezable<DenseMatrixDimensions> {
public final int rows;
public final int cols;
public final int[] rowOffsets;
public DenseMatrixDimensions(int rows, int cols, int[] rowOffsets) {
this.rows = rows;
this.cols = cols;
this.rowOffsets = rowOffsets;
}
}
private int dense(
XGBoostHttpClient client, int[] chunksIds, int nRows, int[] nRowsByChunk,
Vec weightVec, Vec offsetsVec, Vec responseVec, DataInfo dataInfo,
float[] resp, float[] weights, float[] offsets
) {
int[] rowOffsets = new int[nRowsByChunk.length + 1];
for (int i = 0; i < chunksIds.length; i++) {
rowOffsets[i + 1] = nRowsByChunk[i] + rowOffsets[i];
}
client.uploadMatrixData(_modelKey, denseMatrixDimensions, isTrain, new DenseMatrixDimensions(nRows, dataInfo.fullN(), rowOffsets));
UploadDenseChunkFun writeFun = new UploadDenseChunkFun(
frame, chunksIds, rowOffsets, weightVec, offsetsVec, responseVec, dataInfo, resp, weights, offsets
);
H2O.submitTask(new LocalMR<>(writeFun, chunksIds.length)).join();
return writeFun.getTotalRows();
}
public static class DenseMatrixChunk extends Iced<DenseMatrixChunk> implements BootstrapFreezable<DenseMatrixChunk> {
public final int id;
public final float[] data;
DenseMatrixChunk(int id, int dataSize) {
this.id = id;
this.data = new float[dataSize];
}
}
private class UploadDenseChunkFun extends MrFun<UploadDenseChunkFun> {
private final Frame _f;
private final int[] _chunks;
private final int[] _rowOffsets;
private final Vec _weightsVec;
private final Vec _offsetsVec;
private final Vec _respVec;
private final DataInfo _di;
private final float[] _resp;
private final float[] _weights;
private final float[] _offsets;
// OUT
private final int[] _nRowsByChunk;
private UploadDenseChunkFun(
Frame f, int[] chunks, int[] rowOffsets, Vec weightsVec, Vec offsetsVec, Vec respVec, DataInfo di,
float[] resp, float[] weights, float[] offsets
) {
_f = f;
_chunks = chunks;
_rowOffsets = rowOffsets;
_weightsVec = weightsVec;
_offsetsVec = offsetsVec;
_respVec = respVec;
_di = di;
_resp = resp;
_weights = weights;
_offsets = offsets;
_nRowsByChunk = new int[chunks.length];
}
@Override
protected void map(int id) {
final int chunkIdx = _chunks[id];
Chunk[] chks = new Chunk[_f.numCols()];
for (int c = 0; c < chks.length; c++) {
chks[c] = _f.vec(c).chunkForChunkIdx(chunkIdx);
}
Chunk weightsChk = _weightsVec != null ? _weightsVec.chunkForChunkIdx(chunkIdx) : null;
Chunk offsetsChk = _offsetsVec != null ? _offsetsVec.chunkForChunkIdx(chunkIdx) : null;
Chunk respChk = _respVec.chunkForChunkIdx(chunkIdx);
int idx = 0;
DenseMatrixChunk chunkData = new DenseMatrixChunk(id, (_rowOffsets[id+1] - _rowOffsets[id]) * _di.fullN());
int actualRows = 0;
for (int i = 0; i < chks[0]._len; i++) {
if (weightsChk != null && weightsChk.atd(i) == 0) continue;
idx = writeDenseRow(_di, chks, i, chunkData.data, idx);
_resp[_rowOffsets[id] + actualRows] = (float) respChk.atd(i);
if (weightsChk != null) {
_weights[_rowOffsets[id] + actualRows] = (float) weightsChk.atd(i);
}
if (offsetsChk != null) {
_offsets[_rowOffsets[id] + actualRows] = (float) offsetsChk.atd(i);
}
actualRows++;
}
assert idx == chunkData.data.length : "idx should be " + chunkData.data.length + " but it is " + idx;
_nRowsByChunk[id] = actualRows;
makeClient().uploadMatrixData(_modelKey, denseMatrixChunk, isTrain, chunkData);
}
private int writeDenseRow(
DataInfo di, Chunk[] chunks, int rowInChunk, float[] data, int idx
) {
for (int j = 0; j < di._cats; j++) {
int len = di._catOffsets[j+1] - di._catOffsets[j];
double val = chunks[j].isNA(rowInChunk) ? Double.NaN : chunks[j].at8(rowInChunk);
int pos = di.getCategoricalId(j, val) - di._catOffsets[j];
data[idx + pos] = 1f;
idx += len;
}
for (int j = 0; j < di._nums; j++) {
float val = chunks[di._cats + j].isNA(rowInChunk) ? Float.NaN : (float) chunks[di._cats + j].atd(rowInChunk);
data[idx++] = val;
}
return idx;
}
private int getTotalRows() {
int totalRows = 0;
for (int r : _nRowsByChunk) {
totalRows += r;
}
return totalRows;
}
}
private int csr(
XGBoostHttpClient client, int[] chunksIds,
Vec weightVec, Vec offsetsVec, Vec responseVec, DataInfo dataInfo,
float[] resp, float[] weights, float[] offsets
) {
SparseMatrixDimensions dimensions = calculateCSRMatrixDimensions(frame, chunksIds, weightVec, dataInfo);
client.uploadMatrixData(_modelKey, sparseMatrixDimensions, isTrain, dimensions);
UploadSparseMatrixFun fun = new UploadSparseMatrixFun(
frame, chunksIds, weightVec, offsetsVec, dataInfo, dimensions, responseVec, resp, weights, offsets
);
H2O.submitTask(new LocalMR<>(fun, chunksIds.length)).join();
return ArrayUtils.sum(fun._actualRows);
}
public static class SparseMatrixChunk extends Iced<SparseMatrixChunk> implements BootstrapFreezable<SparseMatrixChunk> {
public final int id;
public final long[] rowHeader;
public final float[] data;
public final int[] colIndices;
SparseMatrixChunk(int id, int rowHeaderSize, int dataSize) {
this.id = id;
this.rowHeader = new long[rowHeaderSize];
this.data = new float[dataSize];
this.colIndices = new int[dataSize];
}
}
private class UploadSparseMatrixFun extends MrFun<UploadSparseMatrixFun> {
Frame _frame;
int[] _chunks;
Vec _weightVec;
Vec _offsetsVec;
DataInfo _di;
SparseMatrixDimensions _dims;
Vec _respVec;
float[] _resp;
float[] _weights;
float[] _offsets;
// OUT
int[] _actualRows;
UploadSparseMatrixFun(
Frame frame, int[] chunks, Vec weightVec, Vec offsetVec, DataInfo di,
SparseMatrixDimensions dimensions,
Vec respVec, float[] resp, float[] weights, float[] offsets
) {
_actualRows = new int[chunks.length];
_frame = frame;
_chunks = chunks;
_weightVec = weightVec;
_offsetsVec = offsetVec;
_di = di;
_dims = dimensions;
_respVec = respVec;
_resp = resp;
_weights = weights;
_offsets = offsets;
}
@Override
protected void map(int chunkIdx) {
int chunk = _chunks[chunkIdx];
long nonZeroCount = _dims._precedingNonZeroElementsCounts[chunkIdx];
int rwRow = _dims._precedingRowCounts[chunkIdx];
int rowHeaderSize;
long dataSize;
if (chunkIdx == _dims._precedingNonZeroElementsCounts.length-1) {
rowHeaderSize = _dims._rowHeadersCount - rwRow;
dataSize = _dims._nonZeroElementsCount - nonZeroCount;
} else {
rowHeaderSize = _dims._precedingRowCounts[chunkIdx+1] - rwRow + 1;
dataSize = _dims._precedingNonZeroElementsCounts[chunkIdx+1] - nonZeroCount;
}
assert dataSize < Integer.MAX_VALUE;
Chunk weightChunk = _weightVec != null ? _weightVec.chunkForChunkIdx(chunk) : null;
Chunk offsetChunk = _offsetsVec != null ? _offsetsVec.chunkForChunkIdx(chunk) : null;
Chunk respChunk = _respVec.chunkForChunkIdx(chunk);
Chunk[] featChunks = new Chunk[_frame.vecs().length];
for (int i = 0; i < featChunks.length; i++) {
featChunks[i] = _frame.vecs()[i].chunkForChunkIdx(chunk);
}
SparseMatrixChunk chunkData = new SparseMatrixChunk(chunkIdx, rowHeaderSize, (int) dataSize);
int dataIndex = 0;
int rowHeaderIndex = 0;
for(int i = 0; i < respChunk._len; i++) {
if (weightChunk != null && weightChunk.atd(i) == 0) continue;
chunkData.rowHeader[rowHeaderIndex++] = nonZeroCount;
_actualRows[chunkIdx]++;
for (int j = 0; j < _di._cats; j++) {
chunkData.data[dataIndex] = 1;
if (featChunks[j].isNA(i)) {
chunkData.colIndices[dataIndex] = _di.getCategoricalId(j, Float.NaN);
} else {
chunkData.colIndices[dataIndex] = _di.getCategoricalId(j, featChunks[j].at8(i));
}
dataIndex++;
nonZeroCount++;
}
for (int j = 0; j < _di._nums; j++) {
float val = (float) featChunks[_di._cats + j].atd(i);
if (val != 0) {
chunkData.data[dataIndex] = val;
chunkData.colIndices[dataIndex] = _di._catOffsets[_di._catOffsets.length - 1] + j;
dataIndex++;
nonZeroCount++;
}
}
rwRow = setResponseWeightAndOffset(weightChunk, offsetChunk, respChunk, _resp, _weights, _offsets, rwRow, i);
}
chunkData.rowHeader[rowHeaderIndex] = nonZeroCount;
makeClient().uploadMatrixData(_modelKey, sparseMatrixChunk, isTrain, chunkData);
}
}
}
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/util/BoosterDump.java
|
package hex.tree.xgboost.util;
import hex.genmodel.MojoReaderBackend;
import hex.genmodel.MojoReaderBackendFactory;
import ai.h2o.xgboost4j.java.Booster;
import ai.h2o.xgboost4j.java.XGBoostError;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.Collections;
public class BoosterDump {
public static String[] getBoosterDump(byte[] boosterBytes, String featureMap, final boolean withStats, final String format) {
final Path featureMapFile;
if (featureMap != null && ! featureMap.isEmpty())
try {
featureMapFile = Files.createTempFile("featureMap", ".txt");
} catch (IOException e) {
throw new IllegalStateException("Unable to write a temporary file with featureMap");
}
else
featureMapFile = null;
try {
if (featureMapFile != null) {
Files.write(featureMapFile, Collections.singletonList(featureMap), Charset.defaultCharset(), StandardOpenOption.WRITE);
}
Booster booster = BoosterHelper.loadModel(new ByteArrayInputStream(boosterBytes));
BoosterHelper.BoosterOp<String[]> dumpOp = booster1 -> {
String featureMap1 = featureMapFile != null ? featureMapFile.toFile().getAbsolutePath() : null;
return booster1.getModelDump(featureMap1, withStats, format);
};
return BoosterHelper.doWithLocalRabit(dumpOp, booster);
} catch (IOException e) {
throw new IllegalStateException("Failed to write feature map file", e);
} catch (XGBoostError e) {
throw new IllegalStateException("Failed to dump model", e);
} finally {
if (featureMapFile != null) {
try {
Files.deleteIfExists(featureMapFile);
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
public static void main(String[] args) throws IOException {
if (args.length < 2 || ! "--dump".equals(args[0])) {
usage();
System.exit(1);
}
String mojoFile = args[1];
boolean withStats = args.length > 2 && Boolean.parseBoolean(args[2]);
String format = args.length > 3 ? args[3] : "text";
String featureMap = null;
MojoReaderBackend reader = MojoReaderBackendFactory.createReaderBackend(mojoFile);
if (reader.exists("feature_map")) {
featureMap = new String(reader.getBinaryFile("feature_map"), StandardCharsets.UTF_8);
}
byte[] boosterBytes = reader.getBinaryFile("boosterBytes");
for (String dumpLine : getBoosterDump(boosterBytes, featureMap, withStats, format)) {
System.out.println(dumpLine);
}
}
private static void usage() {
System.out.println("java -cp h2o-genmodel.jar " + BoosterDump.class.getCanonicalName() + " --dump <mojo> [withStats?] [format]");
}
}
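/*
 * Illustrative sketch only (not part of the original source): besides the CLI entry point above,
 * getBoosterDump can be reused programmatically to print the per-tree dump of a serialized booster.
 * The boosterBytes/featureMap inputs are hypothetical (e.g. read from a MOJO as in main()).
 */
class BoosterDumpUsageSketch {
  static void printDump(byte[] boosterBytes, String featureMap) {
    for (String line : BoosterDump.getBoosterDump(boosterBytes, featureMap, true, "text")) {
      System.out.println(line);
    }
  }
}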
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/util/BoosterHelper.java
|
package hex.tree.xgboost.util;
import ai.h2o.xgboost4j.java.*;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
/**
* Utility to access package private Booster methods.
*/
public class BoosterHelper {
public static Booster loadModel(InputStream in) {
try {
return XGBoost.loadModel(in);
} catch (XGBoostError | IOException e) {
throw new IllegalStateException("Failed to load booster.", e);
}
}
public static Booster loadModel(byte[] boosterBytes) {
if (boosterBytes == null) {
throw new IllegalArgumentException("Booster not initialized!");
}
return loadModel(new ByteArrayInputStream(boosterBytes));
}
/**
* Invalidates XGBoost objects (Booster, DMatrix) and frees up their memory
* @param xgbObjects list of XGBoost objects
* @throws IllegalStateException when object invalidation fails; only the first exception is reported (as the
* exception cause), assuming the other failures have the same root cause
*/
public static void dispose(Object... xgbObjects) throws IllegalStateException {
Exception firstException = null;
for (Object xgbObject : xgbObjects) {
if (xgbObject == null)
continue;
if (xgbObject instanceof Booster) {
try {
((Booster) xgbObject).dispose();
} catch (Exception e) {
if (firstException == null)
firstException = e;
}
} else if (xgbObject instanceof DMatrix) {
try {
((DMatrix) xgbObject).dispose();
} catch (Exception e) {
if (firstException == null)
firstException = e;
}
} else
assert false : "Unsupported XGBoost object type: " + xgbObject.getClass();
}
if (firstException != null) {
throw new IllegalStateException("We were unable to free-up xgboost memory. " +
"This could indicate a memory leak and it can lead to H2O instability.", firstException);
}
}
public interface BoosterOp<X> {
X apply(Booster booster) throws XGBoostError;
}
public static <X> X doWithLocalRabit(BoosterOp<X> op, Booster booster) throws XGBoostError {
boolean shutdownRabit = true;
try {
Map<String, String> rabitEnv = new HashMap<>();
rabitEnv.put("DMLC_TASK_ID", "0");
Rabit.init(rabitEnv);
shutdownRabit = true;
X result = op.apply(booster);
Rabit.shutdown();
shutdownRabit = false;
return result;
} finally {
if (shutdownRabit)
try {
Rabit.shutdown();
} catch (XGBoostError e) {
e.printStackTrace(); // don't rely on logging support in genmodel
}
}
}
}
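/*
 * Illustrative sketch only (not part of the original source): shows how doWithLocalRabit wraps a
 * Booster operation so Rabit is initialized and shut down around it, and how dispose() releases the
 * native memory afterwards. The boosterBytes input is a hypothetical serialized booster.
 */
class BoosterHelperUsageSketch {
  static String[] dumpTrees(byte[] boosterBytes) throws XGBoostError {
    Booster booster = BoosterHelper.loadModel(boosterBytes);
    try {
      // run the dump inside a throw-away single-node Rabit session
      return BoosterHelper.doWithLocalRabit(b -> b.getModelDump((String) null, false, "text"), booster);
    } finally {
      BoosterHelper.dispose(booster);
    }
  }
}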
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/util/FeatureScore.java
|
package hex.tree.xgboost.util;
import water.BootstrapFreezable;
import water.Iced;
public class FeatureScore extends Iced<FeatureScore> implements BootstrapFreezable<FeatureScore> {
public static final String GAIN_KEY = "gain";
public static final String COVER_KEY = "cover";
public int _frequency = 1;
public float _gain;
public float _cover;
public void add(FeatureScore fs) {
_frequency += fs._frequency;
_gain += fs._gain;
_cover += fs._cover;
}
}
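/*
 * Illustrative sketch only (not part of the original source): shows how per-tree feature scores can
 * be merged into a single aggregate, the pattern used when accumulating gain/cover/frequency-based
 * variable importances.
 */
class FeatureScoreUsageSketch {
  static FeatureScore merge(FeatureScore a, FeatureScore b) {
    FeatureScore total = new FeatureScore();
    total._frequency = 0; // start from an empty aggregate (the default frequency is 1)
    total.add(a);
    total.add(b);
    return total;
  }
}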
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/util/GpuUtils.java
|
package hex.tree.xgboost.util;
import ai.h2o.xgboost4j.java.*;
import org.apache.log4j.Logger;
import water.DTask;
import water.H2O;
import water.H2ONode;
import water.RPC;
import java.io.IOException;
import java.util.*;
public class GpuUtils {
private static final Logger LOG = Logger.getLogger(GpuUtils.class);
public static final int[] DEFAULT_GPU_ID = new int[] { 0 };
private static volatile boolean defaultGpuIdNotValid = false;
private static volatile boolean gpuSearchPerformed = false;
private static final Set<Integer> GPUS = new HashSet<>();
static boolean isGpuSupportEnabled() {
try {
INativeLibLoader loader = NativeLibLoader.getLoader();
if (!(loader instanceof NativeLibraryLoaderChain))
return false;
NativeLibraryLoaderChain chainLoader = (NativeLibraryLoaderChain) loader;
NativeLibrary lib = chainLoader.getLoadedLibrary();
return lib.hasCompilationFlag(NativeLibrary.CompilationFlags.WITH_GPU);
} catch (IOException e) {
LOG.debug(e);
return false;
}
}
private static boolean gpuCheckEnabled() {
return H2O.getSysBoolProperty("xgboost.gpu.check.enabled", true);
}
public static int numGPUs(H2ONode node) {
return allGPUs(node).size();
}
public static Set<Integer> allGPUs(H2ONode node) {
if (H2O.SELF.equals(node)) {
return allGPUs();
} else {
AllGPUsTask t = new AllGPUsTask();
new RPC<>(node, t).call().get();
return new HashSet<>(Arrays.asList(t.gpuIds));
}
}
private static class AllGPUsTask extends DTask<HasGPUTask> {
// OUT
private Integer[] gpuIds;
private AllGPUsTask() {
}
@Override
public void compute2() {
gpuIds = allGPUs().toArray(new Integer[0]);
tryComplete();
}
}
public static Set<Integer> allGPUs() {
if (gpuSearchPerformed) return Collections.unmodifiableSet(GPUS);
int nextGpuId = 0;
while (hasGPU(new int[] { nextGpuId })) {
nextGpuId++;
}
gpuSearchPerformed = true;
return Collections.unmodifiableSet(GPUS);
}
public static boolean hasGPU(H2ONode node, int[] gpu_id) {
final boolean hasGPU;
if (H2O.SELF.equals(node)) {
hasGPU = hasGPU(gpu_id);
} else {
HasGPUTask t = new HasGPUTask(gpu_id);
new RPC<>(node, t).call().get();
hasGPU = t._hasGPU;
}
LOG.debug("Availability of GPU (id=" + Arrays.toString(gpu_id) + ") on node " + node + ": " + hasGPU);
return hasGPU;
}
private static class HasGPUTask extends DTask<HasGPUTask> {
private final int[] _gpu_id;
// OUT
private boolean _hasGPU;
private HasGPUTask(int[] gpu_id) {
_gpu_id = gpu_id;
}
@Override
public void compute2() {
_hasGPU = hasGPU(_gpu_id);
tryComplete();
}
}
public static boolean hasGPU(int[] gpu_id) {
if (!gpuCheckEnabled()) {
return true;
}
if (gpu_id == null && defaultGpuIdNotValid) // quick default path & no synchronization - if we already know we don't have the default GPU, let's not try to find out again
return false;
boolean hasGPU = true;
if (gpu_id == null) gpu_id = DEFAULT_GPU_ID;
for (int i = 0; hasGPU && i < gpu_id.length; i++) {
hasGPU = hasGPU_impl(gpu_id[i]);
}
if (Arrays.equals(gpu_id, DEFAULT_GPU_ID) && !hasGPU) {
defaultGpuIdNotValid = true; // this can never change back
}
return hasGPU;
}
public static boolean hasGPU() {
return hasGPU(null);
}
// helper
private static synchronized boolean hasGPU_impl(int gpu_id) {
if (!isGpuSupportEnabled()) {
return false;
}
if (GPUS.contains(gpu_id)) {
return true;
}
DMatrix trainMat;
try {
trainMat = new DMatrix(new float[]{1, 2, 1, 2}, 2, 2);
trainMat.setLabel(new float[]{1, 0});
} catch (XGBoostError xgBoostError) {
throw new IllegalStateException("Couldn't prepare training matrix for XGBoost.", xgBoostError);
}
HashMap<String, Object> params = new HashMap<>();
params.put("tree_method", "gpu_hist");
params.put("silent", 1);
params.put("fail_on_invalid_gpu_id", true);
params.put("gpu_id", gpu_id);
HashMap<String, DMatrix> watches = new HashMap<>();
watches.put("train", trainMat);
try {
Map<String, String> localRabitEnv = new HashMap<>();
Rabit.init(localRabitEnv);
ai.h2o.xgboost4j.java.XGBoost.train(trainMat, params, 1, watches, null, null);
GPUS.add(gpu_id);
return true;
} catch (XGBoostError xgBoostError) {
return false;
} finally {
try {
Rabit.shutdown();
} catch (XGBoostError e) {
LOG.warn("Cannot shutdown XGBoost Rabit for current thread.");
}
}
}
}
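/*
 * Illustrative sketch only (not part of the original source): shows how a caller might probe GPU
 * availability before requesting GPU training. The "gpu_hist"/"hist" tree_method values are
 * assumptions about the backing XGBoost parameters (the class above only uses "gpu_hist").
 */
class GpuProbeSketch {
  static String chooseTreeMethod() {
    // hasGPU() falls back to DEFAULT_GPU_ID and runs a tiny trial training via hasGPU_impl
    return GpuUtils.hasGPU() ? "gpu_hist" : "hist";
  }
}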
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/util/NativeLibrary.java
|
/*
Copyright (c) 2014 by Contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package hex.tree.xgboost.util;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* Representation of native library.
*/
public class NativeLibrary {
private static final Log logger = LogFactory.getLog(NativeLibrary.class);
public static CompilationFlags[] EMPTY_COMPILATION_FLAGS = new CompilationFlags[0];
/* Library compilation flags */
public enum CompilationFlags {
WITH_GPU, WITH_OMP
}
private final String name;
private final ClassLoader classLoader;
private final Platform platform;
private final CompilationFlags[] flags;
// Is this library loaded already
private boolean loaded = false;
static NativeLibrary nativeLibrary(String name, CompilationFlags[] flags) {
return new NativeLibrary(name, flags);
}
static NativeLibrary nativeLibrary(String name, CompilationFlags[] flags,
ClassLoader classLoader) {
return new NativeLibrary(name, flags, classLoader);
}
private NativeLibrary(String name, CompilationFlags[] flags) {
this(name, flags, NativeLibrary.class.getClassLoader());
}
private NativeLibrary(String name, CompilationFlags[] flags, ClassLoader classLoader) {
this.name = name;
this.classLoader = classLoader;
this.platform = Platform.geOSType();
this.flags = flags;
}
synchronized boolean load() throws IOException {
if (!loaded) {
loaded = doLoad();
}
return loaded;
}
/**
* Load order:
* <ol>
*   <li>try {@code System.loadLibrary(name)} (library resolvable from the system/native library path),</li>
*   <li>otherwise extract the bundled library from the jar and load it via {@code System.load}.</li>
* </ol>
*/
private boolean doLoad() throws IOException {
final String libName = getName();
try {
System.loadLibrary(libName);
return true;
} catch (UnsatisfiedLinkError e) {
try {
return extractAndLoad(getPlatformLibraryPath());
} catch (IOException ioe) {
logger.warn("Failed to load library from both native path and jar!");
throw ioe;
}
}
}
private String getPlatformLibraryPath() {
return String.format("%s/%s/%s", getResourcePrefix(),
platform.getPlatform(),
platform.getPlatformLibName(getName()));
}
private String getResourcePrefix() {
return "lib";
}
private ClassLoader getClassLoader() {
return classLoader;
}
private boolean extractAndLoad(String libPath) throws IOException {
try {
URL libResource = getLibResource(libPath, getClassLoader());
if (libResource == null) {
logger.debug("We don't bundle library " + libPath);
return false;
}
File temp = extract(libPath, libResource);
// Finally, load the library
System.load(temp.getAbsolutePath());
// Successfully loaded the extracted library
logger.info("Loaded library from " + libPath + " (" + temp.getAbsolutePath() + ")");
return true;
} catch (IOException | UnsatisfiedLinkError e) {
logger.warn("Cannot load library from path " + libPath);
throw new IOException(e);
}
}
private URL getLibResource() {
return getLibResource(getPlatformLibraryPath(), getClassLoader());
}
public boolean isBundled() {
return getLibResource() != null;
}
public File extractTo(File directory) throws IOException {
File target = new File(directory, platform.getPlatformLibName(getName()));
extractTo(getLibResource(), target);
return target;
}
private static void extractTo(URL libResource, File target) throws IOException {
try (InputStream is = libResource.openStream()) {
Files.copy(is, target.toPath(), StandardCopyOption.REPLACE_EXISTING);
}
}
private static URL getLibResource(String libPath, ClassLoader classLoader) {
return classLoader.getResource(libPath);
}
private static File extract(String libPath, URL libResource)
throws IOException, IllegalArgumentException {
assert libResource != null : "Argument `libResource` cannot be null, make sure you only call `extract` for " +
"libraries that are available for the current platform.";
// Split filename to prefix and suffix (extension)
String filename = libPath.substring(libPath.lastIndexOf('/') + 1);
int lastDotIdx = filename.lastIndexOf('.');
String prefix = "";
String suffix = null;
if (lastDotIdx >= 0 && lastDotIdx < filename.length() - 1) {
prefix = filename.substring(0, lastDotIdx);
suffix = filename.substring(lastDotIdx);
}
// Prepare temporary file
File temp = File.createTempFile(prefix, suffix);
temp.deleteOnExit();
// Open output stream and copy data between source file in JAR and the temporary file
extractTo(libResource, temp);
return temp;
}
public String getName() {
return name;
}
public boolean hasCompilationFlag(CompilationFlags flag) {
for (CompilationFlags f : getCompilationFlags()) {
if (flag == f) return true;
}
return false;
}
public CompilationFlags[] getCompilationFlags() {
return flags;
}
@Override
public String toString() {
return String.format("%s (%s)", getName(), getPlatformLibraryPath());
}
}
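/*
 * Illustrative sketch only (not part of the original source): shows how a bundled library can be
 * extracted to a caller-supplied directory. The library name "xgboost4j_minimal" is taken from
 * NativeLibraryLoaderChain; the target directory is hypothetical and must already exist.
 */
class NativeLibraryUsageSketch {
  static File extractMinimalLibIfBundled(File targetDir) throws IOException {
    NativeLibrary lib = NativeLibrary.nativeLibrary("xgboost4j_minimal", NativeLibrary.EMPTY_COMPILATION_FLAGS);
    // returns null when the library is not packaged inside this jar
    return lib.isBundled() ? lib.extractTo(targetDir) : null;
  }
}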
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/util/NativeLibraryLoaderChain.java
|
/*
Copyright (c) 2014 by Contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package hex.tree.xgboost.util;
import java.io.IOException;
import java.util.LinkedList;
import ai.h2o.xgboost4j.java.INativeLibLoader;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import static hex.tree.xgboost.util.NativeLibrary.CompilationFlags.WITH_GPU;
import static hex.tree.xgboost.util.NativeLibrary.CompilationFlags.WITH_OMP;
import static hex.tree.xgboost.util.NativeLibrary.EMPTY_COMPILATION_FLAGS;
import static hex.tree.xgboost.util.NativeLibrary.nativeLibrary;
/**
* A simple loader which tries to load all
* specified libraries in a given order.
*/
public class NativeLibraryLoaderChain implements INativeLibLoader {
private static final Log logger = LogFactory.getLog(NativeLibraryLoaderChain.class);
private final NativeLibrary[] nativeLibs;
private NativeLibrary successfullyLoaded = null;
@SuppressWarnings("unused")
public NativeLibraryLoaderChain() {
this(
// GPU & OpenMP support enabled - backend will be decided at runtime based on availability
nativeLibrary("xgboost4j_gpu", new NativeLibrary.CompilationFlags[] {WITH_GPU, WITH_OMP}),
// Minimum version of library - no gpu, no omp
nativeLibrary("xgboost4j_minimal", EMPTY_COMPILATION_FLAGS)
);
}
private NativeLibraryLoaderChain(NativeLibrary... libs) {
assert libs != null : "Argument `libs` cannot be null.";
nativeLibs = libs;
}
public NativeLibrary[] getNativeLibs() {
return nativeLibs;
}
@Override
public void loadNativeLibs() throws IOException {
LinkedList<IOException> exs = new LinkedList<>();
for (NativeLibrary lib : nativeLibs) {
try {
// Try to load
if (lib.load()) {
// It was successful load, so remember it
successfullyLoaded = lib;
break;
}
} catch (IOException e) {
logger.info("Cannot load library: " + lib.toString());
exs.add(e);
}
}
if ((successfullyLoaded == null) && (! exs.isEmpty())) {
throw new IOException(exs.getLast());
}
}
@Override
public String name() {
return "NativeLibraryLoaderChain";
}
@Override
public int priority() {
return 1;
}
public NativeLibrary getLoadedLibrary() throws IOException {
if (successfullyLoaded != null) {
return successfullyLoaded;
} else {
throw new IOException("No binary library found!");
}
}
}
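/*
 * Illustrative sketch only (not part of the original source): shows how the loader chain can be
 * queried for the capabilities of whichever native library actually loaded; this mirrors the check
 * in hex.tree.xgboost.util.GpuUtils.isGpuSupportEnabled().
 */
class LoaderChainUsageSketch {
  static boolean loadedLibrarySupportsGpu() throws IOException {
    NativeLibraryLoaderChain chain = new NativeLibraryLoaderChain();
    chain.loadNativeLibs();
    return chain.getLoadedLibrary().hasCompilationFlag(WITH_GPU);
  }
}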
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/util/Platform.java
|
package hex.tree.xgboost.util;
/**
* A simple OS type wrapper.
*/
public enum Platform {
OSX("osx"),
LINUX("linux"),
WINDOWS("windows"),
UNKNOWN("unknown");
private final String name;
private final int bits;
Platform(String name) {
this.name = name;
this.bits = getBitModel();
}
public String getName() {
return name;
}
public String getPlatform() {
return name + "_" + bits;
}
public String getPlatformLibName(String libName) {
return System.mapLibraryName(libName);
}
public static Platform geOSType() {
String name = System.getProperty("os.name").toLowerCase().trim();
if (name.startsWith("linux")) {
return LINUX;
}
if (name.startsWith("mac os x")) {
return OSX;
}
if (name.startsWith("win")) {
return WINDOWS;
}
return UNKNOWN;
}
private static int getBitModel() {
String prop = System.getProperty("sun.arch.data.model");
if (prop == null) {
prop = System.getProperty("com.ibm.vm.bitmode");
}
if (prop != null) {
return Integer.parseInt(prop);
}
return -1;
}
}
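/*
 * Illustrative sketch only (not part of the original source): shows how Platform resolves the
 * current OS and the platform-specific file name of a native library (e.g. "lib....so" on Linux,
 * "....dll" on Windows). The library name "xgboost4j_gpu" is just an example.
 */
class PlatformUsageSketch {
  static String describeCurrentPlatform() {
    Platform p = Platform.geOSType();
    return p.getPlatform() + " -> " + p.getPlatformLibName("xgboost4j_gpu");
  }
}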
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/hex/tree/xgboost/util/PredictConfiguration.java
|
package hex.tree.xgboost.util;
import static water.H2O.OptArgs.SYSTEM_PROP_PREFIX;
public class PredictConfiguration {
public static final String PREDICT_JAVA_PROP = SYSTEM_PROP_PREFIX + "xgboost.predict.java.enable";
public static final String PREDICT_NATIVE_PROP = SYSTEM_PROP_PREFIX + "xgboost.predict.native.enable";
public static boolean useJavaScoring() {
String predictNativePropValue = System.getProperty(PREDICT_NATIVE_PROP);
String predictJavaPropValue = System.getProperty(PREDICT_JAVA_PROP);
if (predictNativePropValue != null) {
return !Boolean.parseBoolean(predictNativePropValue);
} else if (predictJavaPropValue != null) {
return Boolean.parseBoolean(predictJavaPropValue);
} else {
return true;
}
}
}
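/*
 * Illustrative sketch only (not part of the original source): shows how the two system properties
 * interact. An explicit native-scoring property takes precedence; otherwise Java scoring is the default.
 */
class PredictConfigurationSketch {
  static boolean resolveWithNativeEnabled() {
    System.setProperty(PredictConfiguration.PREDICT_NATIVE_PROP, "true");
    try {
      return PredictConfiguration.useJavaScoring(); // false here: native scoring was requested
    } finally {
      System.clearProperty(PredictConfiguration.PREDICT_NATIVE_PROP);
    }
  }
}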
|
0
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/water
|
java-sources/ai/h2o/h2o-ext-xgboost/3.46.0.7/water/tools/XGBoostLibExtractTool.java
|
package water.tools;
import hex.tree.xgboost.XGBoostExtension;
import hex.tree.xgboost.util.NativeLibrary;
import hex.tree.xgboost.util.NativeLibraryLoaderChain;
import java.io.File;
import java.io.IOException;
public class XGBoostLibExtractTool {
public static void main(String[] args) throws IOException {
try {
mainInternal(args);
} catch (IllegalArgumentException e) {
System.err.println((e.getMessage()));
System.exit(1);
}
}
public static void mainInternal(String[] args) throws IOException {
if (args.length != 1) {
throw new IllegalArgumentException("XGBoostLibExtractTool: Specify target directory where to extract XGBoost native libraries.");
}
File dir = new File(args[0]);
if (!dir.exists()) {
throw new IllegalArgumentException("XGBoostLibExtractTool: Directory '" + dir.getAbsolutePath() + "' doesn't exist.");
}
NativeLibraryLoaderChain loader = XGBoostExtension.getLoader();
if (loader == null) {
throw new IllegalArgumentException("XGBoostLibExtractTool: Failed to locate native libraries.");
}
for (NativeLibrary lib : loader.getNativeLibs()) {
if (!lib.isBundled())
continue;
File libFile = lib.extractTo(dir);
System.out.println("Extracted native library: " + libFile.getAbsolutePath());
}
}
}
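/*
 * Illustrative sketch only (not part of the original source): the extraction logic can also be
 * invoked programmatically. The target directory is hypothetical and must already exist.
 */
class XGBoostLibExtractSketch {
  static void extractInto(String existingDirectory) throws IOException {
    XGBoostLibExtractTool.mainInternal(new String[]{ existingDirectory });
  }
}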
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/hex/ModelCategory.java
|
package hex;
/** Different prediction categories for models.
*
* This code is shared between runtime models and generated models.
*
* NOTE: the values list in the API annotation ModelOutputSchema needs to match. */
public enum ModelCategory {
Unknown,
Binomial,
Multinomial,
Ordinal,
Regression,
Clustering,
AutoEncoder,
TargetEncoder,
DimReduction,
WordEmbedding,
CoxPH,
AnomalyDetection,
KLime,
BinomialUplift
}
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/hex/package-info.java
|
/**
* Shared code between the H2O runtime and generated POJO and MOJO models.
*/
package hex;
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/hex
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/hex/genmodel/AbstractMojoWriter.java
|
package hex.genmodel;
import hex.genmodel.algos.isotonic.IsotonicCalibrator;
import hex.genmodel.descriptor.ModelDescriptor;
import hex.genmodel.utils.StringEscapeUtils;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
public abstract class AbstractMojoWriter {
/**
* Reference to the model being written. Use this in the subclasses to retrieve information from your model.
*/
private ModelDescriptor model;
private String targetdir;
private StringBuilder tmpfile;
private String tmpname;
private ZipOutputStream zos;
// Local key-value store: these values will be written to the model.ini/[info] section
private Map<String, String> lkv;
//--------------------------------------------------------------------------------------------------------------------
// Inheritance interface: ModelMojoWriter subclasses are expected to override these methods to provide custom behavior
//--------------------------------------------------------------------------------------------------------------------
public AbstractMojoWriter(ModelDescriptor model) {
this.model = model;
this.lkv = new LinkedHashMap<>(20); // Linked so as to preserve the order of entries in the output
}
/**
* Version of the mojo file produced. Follows the <code>major.minor</code>
* format, where <code>minor</code> is a 2-digit number. For example "1.00",
* "2.05", "2.13". See README in mojoland repository for more details.
*/
public abstract String mojoVersion();
/**
* Override in subclasses to write the actual model data.
*/
protected abstract void writeModelData() throws IOException;
//--------------------------------------------------------------------------------------------------------------------
// Utility functions: subclasses should use these to implement the behavior they need
//--------------------------------------------------------------------------------------------------------------------
/**
* Write a simple value to the model.ini/[info] section. Here "simple" means a value that can be stringified with
* .toString(), and its stringified version does not span multiple lines.
*/
protected final void writekv(String key, Object value) throws IOException {
String valStr = value == null ? "null" : value.toString();
if (valStr.contains("\n"))
throw new IOException("The `value` must not contain newline characters, got: " + valStr);
if (lkv.containsKey(key))
throw new IOException("Key " + key + " was already written");
lkv.put(key, valStr);
}
protected final void writekv(String key, int[] value) throws IOException {
writekv(key, Arrays.toString(value));
}
protected final void writekv(String key, double[] value) throws IOException {
writekv(key, Arrays.toString(value));
}
protected final void writekv(String key, float[] value) throws IOException {
writekv(key, Arrays.toString(value));
}
protected final void write(IsotonicCalibrator calibrator) throws IOException {
writekv("calib_min_x", calibrator._min_x);
writekv("calib_max_x", calibrator._max_x);
writeblob("calib/thresholds_x", calibrator._thresholds_x);
writeblob("calib/thresholds_y", calibrator._thresholds_y);
}
private void writeblob(String filename, double[] doubles) throws IOException {
ByteBuffer bb = ByteBuffer.wrap(new byte[4 + doubles.length * 8]);
bb.putInt(doubles.length);
for (double val : doubles)
bb.putDouble(val);
writeblob(filename, bb.array());
}
/**
* Write a binary file to the MOJO archive.
*/
protected final void writeblob(String filename, byte[] blob) throws IOException {
ZipEntry archiveEntry = new ZipEntry(targetdir + filename);
archiveEntry.setSize(blob.length);
zos.putNextEntry(archiveEntry);
zos.write(blob);
zos.closeEntry();
}
/**
* Write a text file to the MOJO archive (or rather open such file for writing).
*/
protected final void startWritingTextFile(String filename) {
assert tmpfile == null : "Previous text file was not closed";
tmpfile = new StringBuilder();
tmpname = filename;
}
/**
* Write a single line of text to a previously opened text file, escape new line characters if enabled.
*/
protected final void writeln(String s, boolean escapeNewlines) {
assert tmpfile != null : "No text file is currently being written";
tmpfile.append(escapeNewlines ? StringEscapeUtils.escapeNewlines(s) : s);
tmpfile.append('\n');
}
private void writelnkv(String key, String value, boolean escapeNewlines) {
assert tmpfile != null : "No text file is currently being written";
tmpfile.append(escapeNewlines ? StringEscapeUtils.escapeNewlines(key) : key);
tmpfile.append(" = ");
tmpfile.append(escapeNewlines ? StringEscapeUtils.escapeNewlines(value) : value);
tmpfile.append('\n');
}
protected void writelnkv(String key, String value) {
writelnkv(key, value, false);
}
/**
* Write a single line of text to a previously opened text file.
*/
protected final void writeln(String s) {
writeln(s, false);
}
/**
* Finish writing a text file.
*/
protected final void finishWritingTextFile() throws IOException {
assert tmpfile != null : "No text file is currently being written";
writeblob(tmpname, toBytes(tmpfile));
tmpfile = null;
}
//--------------------------------------------------------------------------------------------------------------------
// Private
//--------------------------------------------------------------------------------------------------------------------
protected void writeTo(ZipOutputStream zos) throws IOException {
writeTo(zos, "");
}
public final void writeTo(ZipOutputStream zos, String zipDirectory) throws IOException {
initWriting(zos, zipDirectory);
addCommonModelInfo();
writeModelData();
writeModelInfo();
writeDomains();
writeExtraInfo();
}
protected void writeExtraInfo() throws IOException {
// nothing by default
}
private void initWriting(ZipOutputStream zos, String targetdir) {
this.zos = zos;
this.targetdir = targetdir;
}
private void addCommonModelInfo() throws IOException {
int n_categoricals = 0;
for (String[] domain : model.scoringDomains())
if (domain != null)
n_categoricals++;
writekv("h2o_version", model.projectVersion());
writekv("mojo_version", mojoVersion());
writekv("license", "Apache License Version 2.0");
writekv("algo", model.algoName().toLowerCase());
writekv("algorithm", model.algoFullName());
writekv("endianness", ByteOrder.nativeOrder());
writekv("category", model.getModelCategory());
writekv("uuid", model.uuid());
writekv("supervised", model.isSupervised());
writekv("n_features", model.nfeatures());
writekv("n_classes", model.nclasses());
writekv("n_columns", model.columnNames().length);
writekv("n_domains", n_categoricals);
if (model.offsetColumn() != null) {
writekv("offset_column", model.offsetColumn());
}
if (model.foldColumn() != null) {
writekv("fold_column", model.foldColumn());
}
writekv("balance_classes", model.balanceClasses());
writekv("default_threshold", model.defaultThreshold());
writekv("prior_class_distrib", Arrays.toString(model.priorClassDist()));
writekv("model_class_distrib", Arrays.toString(model.modelClassDist()));
writekv("timestamp", model.timestamp());
writekv("escape_domain_values", true); // Without escaping, there is no way to represent multiline categoricals as one-line values.
}
/**
* Create the model.ini file containing 3 sections: [info], [columns] and [domains]. For example:
* [info]
* algo = Random Forest
* n_trees = 100
* n_columns = 25
* n_domains = 3
* ...
* h2o_version = 3.9.10.0
* <p>
* [columns]
* col1
* col2
* ...
* <p>
* [domains]
* 5: 13 d000.txt
* 6: 7 d001.txt
* 12: 124 d002.txt
* <p>
* Here the [info] section lists general model information; [columns] is the list of all column names in the input
* dataframe; and [domains] section maps column numbers (for categorical features) to their domain definition files
* together with the number of categories to be read from that file.
*/
private void writeModelInfo() throws IOException {
startWritingTextFile("model.ini");
writeln("[info]");
for (Map.Entry<String, String> kv : lkv.entrySet()) {
writelnkv(kv.getKey(), kv.getValue());
}
writeln("\n[columns]");
for (String name : model.columnNames()) {
writeln(name);
}
writeln("\n[domains]");
String format = "%d: %d d%03d.txt";
int domIndex = 0;
String[][] domains = model.scoringDomains();
for (int colIndex = 0; colIndex < domains.length; colIndex++) {
if (domains[colIndex] != null)
writeln(String.format(format, colIndex, domains[colIndex].length, domIndex++));
}
finishWritingTextFile();
}
/**
* Create files containing domain definitions for each categorical column.
*/
protected void writeDomains() throws IOException {
int domIndex = 0;
for (String[] domain : model.scoringDomains()) {
if (domain == null) continue;
writeStringArray(domain, String.format("domains/d%03d.txt", domIndex++));
}
}
protected void writeStringArray(String[] array, String filename) throws IOException {
startWritingTextFile(filename);
for (String value : array) {
writeln(value, true);
}
finishWritingTextFile();
}
private static byte[] toBytes(Object value) {
return String.valueOf(value).getBytes(Charset.forName("UTF-8"));
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/hex
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/hex/genmodel/CategoricalEncoding.java
|
package hex.genmodel;
import hex.genmodel.easy.*;
import java.util.Map;
public enum CategoricalEncoding {
AUTO(false) {
@Override
public Map<String, Integer> createColumnMapping(GenModel m) {
return new EnumEncoderColumnMapper(m).create();
}
@Override
public Map<Integer, CategoricalEncoder> createCategoricalEncoders(GenModel m, Map<String, Integer> columnMapping) {
return new EnumEncoderDomainMapConstructor(m, columnMapping).create();
}
},
OneHotExplicit(false) {
@Override
public Map<String, Integer> createColumnMapping(GenModel m) {
return new OneHotEncoderColumnMapper(m).create();
}
@Override
public Map<Integer, CategoricalEncoder> createCategoricalEncoders(GenModel m, Map<String, Integer> columnMapping) {
return new OneHotEncoderDomainMapConstructor(m, columnMapping).create();
}
},
Binary(false) {
@Override
public Map<String, Integer> createColumnMapping(GenModel m) {
return new BinaryColumnMapper(m).create();
}
@Override
public Map<Integer, CategoricalEncoder> createCategoricalEncoders(GenModel m, Map<String, Integer> columnMapping) {
return new BinaryDomainMapConstructor(m, columnMapping).create();
}
},
EnumLimited(true) {
@Override
public Map<String, Integer> createColumnMapping(GenModel m) {
return new EnumLimitedEncoderColumnMapper(m).create();
}
@Override
public Map<Integer, CategoricalEncoder> createCategoricalEncoders(GenModel m, Map<String, Integer> columnMapping) {
return new EnumLimitedEncoderDomainMapConstructor(m, columnMapping).create();
}
},
Eigen(true) {
@Override
public Map<String, Integer> createColumnMapping(GenModel m) {
return new EigenEncoderColumnMapper(m).create();
}
@Override
public Map<Integer, CategoricalEncoder> createCategoricalEncoders(GenModel m, Map<String, Integer> columnMapping) {
return new EigenEncoderDomainMapConstructor(m, columnMapping).create();
}
},
LabelEncoder(false) {
@Override
public Map<String, Integer> createColumnMapping(GenModel m) {
return new EnumEncoderColumnMapper(m).create();
}
@Override
public Map<Integer, CategoricalEncoder> createCategoricalEncoders(GenModel m, Map<String, Integer> columnMapping) {
return new LabelEncoderDomainMapConstructor(m, columnMapping).create();
}
};
private final boolean _parametrized;
CategoricalEncoding(boolean parametrized) {
_parametrized = parametrized;
}
public abstract Map<String, Integer> createColumnMapping(GenModel m);
public abstract Map<Integer, CategoricalEncoder> createCategoricalEncoders(GenModel m, Map<String, Integer> columnMapping);
/**
* Does the categorical encoding have any parameters that are needed to correctly interpret it?
   * E.g. the number of classes for EnumLimited.
*
* @return Is this encoding parametrized?
*/
public boolean isParametrized() {
return _parametrized;
}
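  // Illustrative usage sketch (assuming a loaded GenModel instance `model`):
  //   CategoricalEncoding enc = model.getCategoricalEncoding();
  //   Map<String, Integer> columnMapping = enc.createColumnMapping(model);
  //   Map<Integer, CategoricalEncoder> encoders = enc.createCategoricalEncoders(model, columnMapping);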
}
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/hex
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/hex/genmodel/ConverterFactoryProvidingModel.java
|
package hex.genmodel;
import hex.genmodel.easy.CategoricalEncoder;
import hex.genmodel.easy.EasyPredictModelWrapper;
import hex.genmodel.easy.RowToRawDataConverter;
import java.util.Map;
public interface ConverterFactoryProvidingModel {
/**
* @return A new instance of {@link RowToRawDataConverter} related to the underlying {@link hex.genmodel.GenModel}
*/
RowToRawDataConverter makeConverterFactory(Map<String, Integer> modelColumnNameToIndexMap,
Map<Integer, CategoricalEncoder> domainMap,
EasyPredictModelWrapper.ErrorConsumer errorConsumer,
EasyPredictModelWrapper.Config config);
}
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/hex
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/hex/genmodel/FolderMojoReaderBackend.java
|
package hex.genmodel;
import java.io.*;
/**
 * MojoReaderBackend implementation that reads MOJO artifacts from an exploded model folder on the local filesystem.
 */
class FolderMojoReaderBackend implements MojoReaderBackend {
private String root;
public FolderMojoReaderBackend(String folder) {
root = folder;
}
@Override
public BufferedReader getTextFile(String filename) throws IOException {
File f = new File(root, filename);
FileReader fr = new FileReader(f);
return new BufferedReader(fr);
}
@Override
public byte[] getBinaryFile(String filename) throws IOException {
File f = new File(root, filename);
byte[] out = new byte[(int) f.length()];
DataInputStream dis = new DataInputStream(new FileInputStream(f));
try {
dis.readFully(out);
} finally {
try { dis.close(); } catch (IOException e) { /* ignored */ }
}
return out;
}
@Override
public boolean exists(String filename) {
return new File(root, filename).exists();
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/hex
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/hex/genmodel/GenModel.java
|
package hex.genmodel;
import hex.ModelCategory;
import water.genmodel.IGeneratedModel;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.io.Serializable;
import java.util.List;
import java.util.*;
/**
* This is a helper class to support Java generated models.
*/
public abstract class GenModel implements IGenModel, IGeneratedModel, Serializable {
/** Column names; last is response for supervised models */
public final String[] _names;
/** Categorical (factor/enum) mappings, per column. Null for non-enum cols.
* Columns match the post-init cleanup columns. The last column holds the
* response col enums for SupervisedModels. */
public final String[][] _domains;
/** Name of the response column used for training (only for supervised models). */
public final String _responseColumn;
/** Name of the column with offsets (used for certain types of models). */
public String _offsetColumn;
public String _foldColumn;
  /** Name of the column that determines the treatment group; currently only used by UpliftDRF models. */
public String _treatmentColumn;
public GenModel(String[] names, String[][] domains, String responseColumn) {
_names = names;
_domains = domains;
_responseColumn = responseColumn;
_treatmentColumn = null;
}
public GenModel(String[] names, String[][] domains, String responseColumn, String treatmentColumn) {
_names = names;
_domains = domains;
_responseColumn = responseColumn;
_treatmentColumn = treatmentColumn;
}
/**
* @deprecated This constructor is deprecated and will be removed in a future version.
   * Use {@link #GenModel(String[], String[][], String)} instead.
*/
@Deprecated
public GenModel(String[] names, String[][] domains) {
this(names, domains, null);
}
public boolean requiresOffset() {
return false;
}
//--------------------------------------------------------------------------------------------------------------------
// IGenModel interface
//--------------------------------------------------------------------------------------------------------------------
/** Returns true for supervised models. */
@Override public boolean isSupervised() {
return false;
}
/** Returns number of input features. */
@Override public int nfeatures() {
return _names.length;
}
public int nCatFeatures() {
int nCat = 0;
String[][] domainValues = getDomainValues();
for (int i = 0; i < nfeatures(); i++) {
if (domainValues[i] != null)
nCat++;
}
return nCat;
}
/** Returns names of input features. */
@Override public String[] features() {
return Arrays.copyOf(_names, nfeatures());
}
/** Returns number of output classes for classifiers, 1 for regression models, and 0 for unsupervised models. */
@Override public int nclasses() {
return 0;
}
/** Returns this model category. */
@Override public abstract ModelCategory getModelCategory();
public String[] getOutputNames() {
final ModelCategory category = getModelCategory();
final String[] outputNames;
// Emit outputCSV column names.
switch (category) {
case AutoEncoder:
List<String> onames = new LinkedList<>();
final String[] cnames = getNames();
final int numCats = nCatFeatures();
final String[][] domainValues = getDomainValues();
for (int index = 0; index <= numCats - 1; index++) { // add names for categorical columns
String[] tdomains = domainValues[index];
int tdomainLen = tdomains.length-1;
for (int index2 = 0; index2 <= tdomainLen; index2++) {
onames.add("reconstr_" + cnames[index] + "." + tdomains[index2]);
}
onames.add("reconstr_" + cnames[index] + ".missing(NA)");
}
for (int index = numCats; index < cnames.length; index++) { // add the numerical column names
onames.add("reconstr_" + cnames[index]);
}
outputNames = onames.toArray(new String[0]);
break;
case Binomial:
case Multinomial:
case Ordinal:
final String[] responseDomainValues = getDomainValues(getResponseIdx());
outputNames = new String[1 + responseDomainValues.length];
outputNames[0] = "predict";
System.arraycopy(responseDomainValues, 0, outputNames, 1, outputNames.length - 1);
// turn integer class labels such as 0, 1, etc. into p0, p1, etc.
for (int i = 1; i < outputNames.length; i++) {
try {
Integer.valueOf(outputNames[i]);
outputNames[i] = "p" + outputNames[i];
} catch (Exception e) {
// do nothing, non-integer names are fine already
}
}
break;
case Clustering:
outputNames = new String[]{"cluster"};
break;
case Regression:
outputNames = new String[]{"predict"};
break;
case CoxPH:
outputNames = new String[]{"lp"};
break;
case BinomialUplift:
outputNames = new String[]{"uplift_predict", "p_y1_with_treatment", "p_y1_without_treatment"};
break;
default:
throw new UnsupportedOperationException("Getting output column names for model category '" +
category + "' is not supported.");
}
return outputNames;
}
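  // Illustrative example: an AutoEncoder model with a categorical column "color"
  // (domain {"red", "blue"}) followed by a numeric column "age" yields output names
  // {"reconstr_color.red", "reconstr_color.blue", "reconstr_color.missing(NA)", "reconstr_age"};
  // for a Binomial model with response domain {"0", "1"} the names are {"predict", "p0", "p1"}.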
/**
* Companion method to getOutputNames. For each output column specifies
* what is the domain of the column.
*
* @return array of domain values for each output column, if the type of the column is not categorical use null
*/
public String[][] getOutputDomains() {
final ModelCategory category = getModelCategory();
final String[][] outputDomains = new String[getOutputNames().length][];
switch (category) {
case Binomial:
case Multinomial:
case Ordinal:
outputDomains[0] = getDomainValues(getResponseIdx());
break;
case Regression:
case Clustering:
case AutoEncoder:
case TargetEncoder:
case DimReduction:
case WordEmbedding:
case CoxPH:
case AnomalyDetection:
case KLime:
case BinomialUplift:
// all numeric, nothing to set here
break;
default:
throw new UnsupportedOperationException("Getting output domains for model category '" +
category + "' is not yet supported.");
}
return outputDomains;
}
/** Override this for models that may produce results in different categories. */
@Override public EnumSet<ModelCategory> getModelCategories() {
return EnumSet.of(getModelCategory());
}
//--------------------------------------------------------------------------------------------------------------------
// IGeneratedModel interface
//--------------------------------------------------------------------------------------------------------------------
@Override public abstract String getUUID();
/** Returns number of columns used as input for training (i.e., exclude response and offset columns). */
@Override public int getNumCols() {
return nfeatures();
}
/** The names of all columns used, including response and offset columns. */
@Override public String[] getNames() {
return _names;
}
public int getOrigNumCols() {
String[] origNames = getOrigNames();
if (origNames == null || origNames.length == 0)
return 0;
boolean hasResponse = false;
if (isSupervised()) {
String responseName = getResponseName();
hasResponse = origNames[origNames.length - 1].equals(responseName);
}
return hasResponse ? origNames.length - 1 : origNames.length;
}
/** The original names of all columns used, including response and offset columns. */
@Override public String[] getOrigNames() {
return null;
}
/** The name of the response column. */
@Override public String getResponseName() {
// Note: _responseColumn is not set when deprecated constructor GenModel(String[] names, String[][] domains) is used
int r = getResponseIdx();
return r < _names.length ? _names[r] : _responseColumn;
}
/** Returns the index of the response column inside getDomains(). */
@Override public int getResponseIdx() {
if (!isSupervised())
throw new UnsupportedOperationException("Cannot provide response index for unsupervised models.");
return _domains.length - 1;
}
@Override public String getOffsetName() {
return _offsetColumn;
}
/** Get number of classes in the given column.
* Return number greater than zero if the column is categorical or -1 if the column is numeric. */
@Override public int getNumClasses(int colIdx) {
String[] domval = getDomainValues(colIdx);
return domval != null? domval.length : -1;
}
/** Return a number of classes in response column. */
@Override public int getNumResponseClasses() {
if (!isClassifier())
throw new UnsupportedOperationException("Cannot provide number of response classes for non-classifiers.");
return nclasses();
}
/** Return type of encoding expected by the model implementation. */
@Override public CategoricalEncoding getCategoricalEncoding() {
return CategoricalEncoding.AUTO; // by default model handles the encoding
}
/** Returns true if this model represents a classifier, else it is used for regression. */
@Override public boolean isClassifier() {
ModelCategory cat = getModelCategory();
return cat == ModelCategory.Binomial || cat == ModelCategory.Multinomial || cat == ModelCategory.Ordinal;
}
/** Returns true if this model represents an AutoEncoder. */
@Override public boolean isAutoEncoder() {
return getModelCategory() == ModelCategory.AutoEncoder;
}
/** Gets domain of the given column. */
@Override public String[] getDomainValues(String name) {
int colIdx = getColIdx(name);
return colIdx != -1 ? getDomainValues(colIdx) : null;
}
/** Returns domain values for the i-th column. */
@Override public String[] getDomainValues(int i) {
return getDomainValues()[i];
}
/** Returns domain values for all columns, including the response column. */
@Override public String[][] getDomainValues() {
return _domains;
}
@Override
public String[][] getOrigDomainValues() {
return null;
}
/** Returns original Eigen encoder projections array for all columns. */
@Override
public double[] getOrigProjectionArray() {return null;}
/** Returns index of a column with given name, or -1 if the column is not found. */
@Override public int getColIdx(String name) {
String[] names = getNames();
for (int i = 0; i < names.length; i++) if (names[i].equals(name)) return i;
return -1;
}
/** Maps given column's categorical to the integer used by this model (returns -1 if mapping not found). */
@Override public int mapEnum(int colIdx, String enumValue) {
String[] domain = getDomainValues(colIdx);
if (domain != null)
for (int i = 0; i < domain.length; i++)
if (enumValue.equals(domain[i]))
return i;
return -1;
}
/** Returns the expected size of preds array which is passed to `predict(double[], double[])` function. */
@Override public int getPredsSize() { // fractional binomial has numerical response
return isClassifier() ? (1 + getNumResponseClasses()) : 2;
}
public int getPredsSize(ModelCategory mc) {
return (mc == ModelCategory.DimReduction)? nclasses() :getPredsSize();
}
public static String createAuxKey(String k) {
return k + ".aux";
}
/*
@Override
public float[] predict(double[] data, float[] preds) {
return predict(data, preds, 0);
}
@Override
public float[] predict(double[] data, float[] preds, int maxIters) {
throw new UnsupportedOperationException("Unsupported operation - use score0 method!");
}
*/
//--------------------------------------------------------------------------------------------------------------------
/** Takes a HashMap mapping column names to doubles.
* <p>
* Looks up the column names needed by the model, and places the doubles into
* the data array in the order needed by the model. Missing columns use NaN.
* </p>
*/
/*
public double[] map(Map<String, Double> row, double data[]) {
for (int i = 0; i < nfeatures(); i++) {
Double d = row.get(_names[i]);
data[i] = d==null ? Double.NaN : d;
}
return data;
}
*/
/** Subclasses implement the scoring logic. The data is pre-loaded into a
* re-used temp array, in the order the model expects. The predictions are
* loaded into the re-used temp array, which is also returned. This call
* exactly matches the hex.Model.score0, but uses the light-weight
* GenModel class. */
public abstract double[] score0(double[] row, double[] preds);
public double[] score0(double[] row, double offset, double[] preds) {
throw new UnsupportedOperationException("`offset` column is not supported");
}
/** Subclasses implement calibration of class probabilities. The input is array of
* predictions returned by the scoring function (score0). Supports classification
* models that were trained with calibration enabled. Original probabilities
* in the predictions array are overwritten by their corresponding calibrated
* counterparts. Return false if model doesn't support calibration.
*/
public boolean calibrateClassProbabilities(double preds[]) {
return false;
}
/*
// Does the mapping lookup for every row, no allocation.
// data and preds arrays are pre-allocated and can be re-used for every row.
public double[] score0(Map<String, Double> row, double[] data, double[] preds) {
Double offset = _offsetColumn == null? null : row.get(_offsetColumn);
return score0(map(row, data), offset == null? 0.0 : offset, preds);
}
// Does the mapping lookup for every row.
// preds array is pre-allocated and can be re-used for every row.
// Allocates a double[] for every row.
public double[] score0(Map<String, Double> row, double[] preds) {
return score0(row, new double[nfeatures()], preds);
}
// Does the mapping lookup for every row.
// Allocates a double[] and a float[] for every row.
public double[] score0(Map<String, Double> row) {
return score0(row, new double[nfeatures()], new double[nclasses()+1]);
}
*/
/**
* Correct a given list of class probabilities produced as a prediction by a model back to prior class distribution
*
* <p>The implementation is based on Eq. (27) in <a href="http://gking.harvard.edu/files/0s.pdf">the paper</a>.
*
* @param scored list of class probabilities beginning at index 1
* @param priorClassDist original class distribution
* @param modelClassDist class distribution used for model building (e.g., data was oversampled)
* @return corrected list of probabilities
*/
public static double[] correctProbabilities(double[] scored, double[] priorClassDist, double[] modelClassDist) {
double probsum=0;
for( int c=1; c<scored.length; c++ ) {
final double original_fraction = priorClassDist[c-1];
final double oversampled_fraction = modelClassDist[c-1];
assert(!Double.isNaN(scored[c])) : "Predicted NaN class probability";
if (original_fraction != 0 && oversampled_fraction != 0) scored[c] *= original_fraction / oversampled_fraction;
probsum += scored[c];
}
if (probsum>0) for (int i=1;i<scored.length;++i) scored[i] /= probsum;
return scored;
}
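  // Worked example: with priorClassDist = {0.9, 0.1} and modelClassDist = {0.5, 0.5}
  // (classes balanced during training), a raw prediction preds = {NaN, 0.6, 0.4} is
  // rescaled to {1.08, 0.08} (0.6*0.9/0.5 and 0.4*0.1/0.5) and then normalized by
  // their sum 1.16, giving corrected probabilities of roughly {0.931, 0.069}.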
/** Utility function to get a best prediction from an array of class
* prediction distribution. It returns the index of the max. probability (if that exists).
* In the case of ties, it samples from the tied classes with the likelihood given by the prior probabilities.
* @param preds an array of prediction distribution. Length of arrays is equal to a number of classes+1.
* @param priorClassDist prior class probabilities (used to break ties)
* @param data Test data
* @param threshold threshold for binary classifier
* @return the best prediction (index of class, zero-based)
*/
public static int getPrediction(double[] preds, double[] priorClassDist, double[] data, double threshold) {
if (preds.length == 3) {
return getPredictionBinomial(preds, threshold);
} else {
return getPredictionMultinomial(preds, priorClassDist, data);
}
}
public static int getPredictionBinomial(double[] preds, double threshold) {
return (preds[2] >= threshold) ? 1 : 0; //no tie-breaking
}
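  // Illustrative example: preds = {NaN, 0.35, 0.65} with threshold 0.5 predicts class 1,
  // because preds[2] = 0.65 >= 0.5; with threshold 0.7 the same preds predict class 0.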
public static int getPredictionMultinomial(double[] preds, double[] priorClassDist, double[] data) {
List<Integer> ties = new ArrayList<>();
ties.add(0);
int best=1, tieCnt=0; // Best class; count of ties
    for( int c=2; c<preds.length; c++) {
      if( preds[best] < preds[c] ) {
        best = c; // take the max index
        tieCnt=0; // No ties
        ties.clear(); // previously recorded ties no longer tie with the new best
        ties.add(c-1);
      } else if (preds[best] == preds[c]) {
        tieCnt++; // Ties
        ties.add(c-1);
      }
    }
if( tieCnt==0 ) return best-1; // Return zero-based best class
long hash = 0; // hash for tie-breaking
if( data != null )
      for( double d : data ) hash ^= Double.doubleToRawLongBits(d) >> 6; // drop 6 least significant bits of the mantissa (IEEE-754 double layout: 1b sign, 11b exponent, 52b mantissa)
if (priorClassDist!=null) {
assert(preds.length==priorClassDist.length+1);
// Tie-breaking based on prior probabilities
// Example: probabilities are 0.4, 0.2, 0.4 for a 3-class problem with priors 0.7, 0.1, 0.2
// Probability of predicting class 1 should be higher than for class 3 based on the priors
double sum = 0;
for (Integer i : ties) { //ties = [0, 2]
sum += priorClassDist[i]; //0.7 + 0.2
}
// sum is now 0.9
Random rng = new Random(hash);
double tie = rng.nextDouble(); //for example 0.4135 -> should pick the first of the ties, since it occupies 0.7777 = 0.7/0.9 of the 0...1 range, and 0.4135 < 0.7777
double partialSum = 0;
for (Integer i : ties) {
partialSum += priorClassDist[i] / sum; //0.7777 at first iteration, 1.0000 at second iteration
if (tie <= partialSum)
return i;
}
}
// Tie-breaking logic (should really never be triggered anymore)
double res = preds[best]; // One of the tied best results
int idx = (int)hash%(tieCnt+1); // Which of the ties we'd like to keep
for( best=1; best<preds.length; best++)
if( res == preds[best] && --idx < 0 )
return best-1; // Return best
throw new RuntimeException("Should Not Reach Here");
}
// Utility to do bitset lookup from a POJO
public static boolean bitSetContains(byte[] bits, int nbits, int bitoff, double dnum) {
assert(!Double.isNaN(dnum));
int idx = (int)dnum;
idx -= bitoff;
assert (idx >= 0 && idx < nbits): "Must have "+bitoff+" <= idx <= " + (bitoff+nbits-1) + ": " + idx;
return (bits[idx >> 3] & ((byte)1 << (idx & 7))) != 0;
}
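  // Illustrative example: with bitoff = 3, nbits = 10 and dnum = 7.0, idx becomes 4,
  // so the lookup tests bit 4 of bits[0], i.e. (bits[0] & 0b0001_0000) != 0.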
public static boolean bitSetIsInRange(int nbits, int bitoff, double dnum) {
assert(!Double.isNaN(dnum));
int idx = (int)dnum;
idx -= bitoff;
return (idx >= 0 && idx < nbits);
}
  // TODO: currently implemented for K-means only; this should really be unified across all models.
public static void Kmeans_preprocessData(double [] data, double [] means, double [] mults, int[] modes){
for(int i = 0; i < data.length; i++) {
data[i] = Kmeans_preprocessData(data[i], i, means, mults, modes);
}
}
public static double Kmeans_preprocessData(double d, int i, double [] means, double [] mults, int[] modes){
if(modes[i] == -1) { // Mode = -1 for non-categorical cols
if( Double.isNaN(d) )
d = means[i];
if( mults != null ) {
d -= means[i];
d *= mults[i];
}
} else {
if( Double.isNaN(d) )
d = modes[i];
}
return d;
}
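  // Illustrative example: for a numeric column (modes[i] == -1) with means[i] = 5.0 and
  // mults[i] = 0.5, an input of 7.0 becomes (7.0 - 5.0) * 0.5 = 1.0, while a NaN is first
  // imputed to the mean 5.0 and therefore standardizes to 0.0; for a categorical column,
  // a NaN is simply replaced by the column mode.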
// --------------------------------------------------------------------------
// KMeans utilities
// For KMeansModel scoring; just the closest cluster center
public static int KMeans_closest(double[][] centers, double[] point, String[][] domains) {
int min = -1;
double minSqr = Double.MAX_VALUE;
for( int cluster = 0; cluster < centers.length; cluster++ ) {
double sqr = KMeans_distance(centers[cluster],point,domains);
if( sqr < minSqr ) { // Record nearest cluster center
min = cluster;
minSqr = sqr;
}
}
return min;
}
// Outputs distances from a given point to all cluster centers, returns index of the closest cluster center
public static int KMeans_distances(double[][] centers, double[] point, String[][] domains, double[] distances) {
int min = -1;
double minSqr = Double.MAX_VALUE;
for (int cluster = 0; cluster < centers.length; cluster++) {
distances[cluster] = KMeans_distance(centers[cluster], point, domains);
if (distances[cluster] < minSqr) { // Record nearest cluster center
min = cluster;
minSqr = distances[cluster];
}
}
return min;
}
// only used for GLRM initialization - inverse of distance to each cluster center normalized to sum to one
public static double[] KMeans_simplex(double[][] centers, double[] point, String[][] domains) {
double[] dist = new double[centers.length];
double sum = 0, inv_sum = 0;
for( int cluster = 0; cluster < centers.length; cluster++ ) {
dist[cluster] = KMeans_distance(centers[cluster],point,domains);
sum += dist[cluster];
inv_sum += 1.0 / dist[cluster];
}
double[] ratios = new double[centers.length];
if (sum == 0) { // In degenerate case where all cluster centers identical to point, pick one at random
Random rng = new Random();
int idx = rng.nextInt(centers.length);
ratios[idx] = 1;
} else {
// Is the point identical to an existing cluster center?
int idx = -1;
for (int cluster = 0; cluster < centers.length; cluster++) {
if(dist[cluster] == 0) {
idx = cluster;
break;
}
}
if(idx == -1) { // If not, take ratios as inverse of cluster distance normalized to sum to one
for (int cluster = 0; cluster < centers.length; cluster++)
ratios[cluster] = 1.0 / (dist[cluster] * inv_sum);
} else // Otherwise, just assign directly to closest cluster
ratios[idx] = 1;
}
return ratios;
}
// only used for metric builder - uses float[] and fills up colSum & colSumSq arrays, otherwise the same as method below.
// WARNING - if changing this code - also change the code below
public static double KMeans_distance(double[] center, float[] point, int [] modes,
double[] colSum, double[] colSumSq) {
double sqr = 0; // Sum of dimensional distances
int pts = point.length; // Count of valid points
for(int column = 0; column < center.length; column++) {
float d = point[column];
if( Float.isNaN(d) ) { pts--; continue; }
if( modes[column] != -1 ) { // Categorical?
if( d != center[column] ) {
sqr += 1.0; // Manhattan distance
}
if(d != modes[column]) {
colSum[column] += 1;
}
} else { // Euclidean distance
double delta = d - center[column];
sqr += delta * delta;
colSum[column] += d;
colSumSq[column] += d*d;
}
}
// Scale distance by ratio of valid dimensions to all dimensions - since
// we did not add any error term for the missing point, the sum of errors
// is small - ratio up "as if" the missing error term is equal to the
// average of other error terms. Same math another way:
// double avg_dist = sqr / pts; // average distance per feature/column/dimension
// sqr = sqr * point.length; // Total dist is average*#dimensions
if( 0 < pts && pts < point.length ) {
double scale = ((double) point.length) / pts;
sqr *= scale;
// for (int i=0; i<colSum.length; ++i) {
// colSum[i] *= Math.sqrt(scale);
// colSumSq[i] *= scale;
// }
}
return sqr;
}
// WARNING - if changing this code - also change the code above
public static double KMeans_distance(double[] center, double[] point,String[][] domains) {
double sqr = 0; // Sum of dimensional distances
int pts = point.length; // Count of valid points
for(int column = 0; column < center.length; column++) {
double d = point[column];
if( Double.isNaN(d) ) { pts--; continue; }
if( domains[column] != null ) { // Categorical?
if( d != center[column] )
sqr += 1.0; // Manhattan distance
} else { // Euclidean distance
double delta = d - center[column];
sqr += delta * delta;
}
}
// Scale distance by ratio of valid dimensions to all dimensions - since
// we did not add any error term for the missing point, the sum of errors
// is small - ratio up "as if" the missing error term is equal to the
// average of other error terms. Same math another way:
// double avg_dist = sqr / pts; // average distance per feature/column/dimension
// sqr = sqr * point.length; // Total dist is average*#dimensions
if( 0 < pts && pts < point.length )
sqr *= ((double) point.length) / pts;
return sqr;
}
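  // Illustrative example: a 10-dimensional point with 2 NaN coordinates has its squared
  // distance accumulated over the 8 valid dimensions and then scaled by 10/8 = 1.25 to
  // approximate the contribution of the missing terms.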
// --------------------------------------------------------------------------
// SharedTree utilities
// Build a class distribution from a log scale.
// Because we call Math.exp, we have to be numerically stable or else we get
// Infinities, and then shortly NaN's. Rescale the data so the largest value
// is +/-1 and the other values are smaller. See notes here:
// http://www.hongliangjie.com/2011/01/07/logsum/
public static double log_rescale(double[] preds) {
// Find a max
double maxval=Double.NEGATIVE_INFINITY;
for( int k=1; k<preds.length; k++) maxval = Math.max(maxval,preds[k]);
assert !Double.isInfinite(maxval) : "Something is wrong with GBM trees since returned prediction is " + Arrays.toString(preds);
// exponentiate the scaled predictions; keep a rolling sum
double dsum=0;
for( int k=1; k<preds.length; k++ )
dsum += (preds[k]=Math.exp(preds[k]-maxval));
    return dsum; // Return rolling sum of the now-exponentiated (rescaled) predictions
}
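  // Illustrative example: preds = {0, 400, 402, 401} would overflow Math.exp if exponentiated
  // directly; after subtracting maxval = 402 the exponents become {-2, 0, -1}, giving stable
  // values {~0.135, 1.0, ~0.368} and a finite sum for the subsequent normalization.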
// Build a class distribution from a log scale; find the top prediction
public static void GBM_rescale(double[] preds) {
double sum = log_rescale(preds);
for (int k = 1; k < preds.length; k++)
preds[k] /= sum;
}
// --------------------------------------------------------------------------
// GLM utilities
public static double GLM_identityInv( double x ) { return x; }
public static double GLM_logitInv( double x ) { return 1.0 / (Math.exp(-x) + 1.0); }
public static double GLM_logInv( double x ) { return Math.exp(x); }
public static double GLM_inverseInv( double x ) { double xx = (x < 0) ? Math.min(-1e-5, x) : Math.max(1e-5, x); return 1.0 / xx; }
public static double GLM_ologitInv(double x) {
return GLM_logitInv(x);
}
public static double GLM_tweedieInv( double x, double tweedie_link_power ) {
return tweedie_link_power == 0?Math.max(2e-16,Math.exp(x)):Math.pow(x, 1.0/ tweedie_link_power);
}
  /** Optional model header; the base implementation returns {@code null} and subclasses may override. */
public String getHeader() { return null; }
// Helper for XGBoost Native (models that require explicit one-hot encoding on the fly)
static public void setInput(final double[] from, float[] to, int _nums, int _cats, int[] _catOffsets, double[] _normMul, double[] _normSub, boolean useAllFactorLevels, boolean replaceMissingWithZero) {
double[] nums = new double[_nums]; // a bit wasteful - reallocated each time
int[] cats = new int[_cats]; // a bit wasteful - reallocated each time
setCats(from, nums, cats, _cats, _catOffsets, _normMul, _normSub, useAllFactorLevels);
assert(to.length == _nums + _catOffsets[_cats]);
Arrays.fill(to, 0f);
for (int i = 0; i < _cats; ++i)
if (cats[i] >= 0)
to[cats[i]] = 1f; // one-hot encode categoricals
for (int i = 0; i < _nums; ++i)
to[_catOffsets[_cats] + i] = Double.isNaN(nums[i]) ? (replaceMissingWithZero ? 0 : Float.NaN) : (float)nums[i];
}
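  // Illustrative example: with _cats = 1, _nums = 1, _catOffsets = {0, 3} and
  // useAllFactorLevels = true, an input row {2.0, 1.5} (level 2 of a 3-level factor
  // followed by one numeric value) produces to = {0f, 0f, 1f, 1.5f}.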
// Helper for Deeplearning, note: we assume nums and cats are allocated already and being re-used
static public void setInput(final double[] from, double[] to, double[] nums, int[] cats, int _nums, int _cats,
int[] _catOffsets, double[] _normMul, double[] _normSub, boolean useAllFactorLevels, boolean replaceMissingWithZero) {
setCats(from, nums, cats, _cats, _catOffsets, _normMul, _normSub, useAllFactorLevels);
assert(to.length == _nums + _catOffsets[_cats]);
Arrays.fill(to, 0d);
for (int i = 0; i < _cats; ++i)
if (cats[i] >= 0)
to[cats[i]] = 1d; // one-hot encode categoricals
for (int i = 0; i < _nums; ++i)
to[_catOffsets[_cats] + i] = Double.isNaN(nums[i]) ? (replaceMissingWithZero ? 0 : Double.NaN) : nums[i];
}
// Helper for XGBoost Java
static public void setCats(final double[] from, double[] nums, int[] cats, int _cats, int[] _catOffsets,
double[] _normMul, double[] _normSub, boolean useAllFactorLevels) {
setCats(from, cats, _cats, _catOffsets, useAllFactorLevels);
for (int i = _cats; i < from.length; ++i) {
double d = from[i];
if ((_normMul != null) && (_normMul.length >0)) {
d = (d - _normSub[i - _cats]) * _normMul[i - _cats];
}
nums[i - _cats] = d; //can be NaN for missing numerical data
}
}
static public void setCats(final double[] from, int[] to, int cats, int[] catOffsets, boolean useAllFactorLevels) {
for (int i = 0; i < cats; ++i) {
if (Double.isNaN(from[i])) {
to[i] = (catOffsets[i + 1] - 1); //use the extra level for NAs made during training
} else {
int c = (int) from[i];
if (useAllFactorLevels)
to[i] = c + catOffsets[i];
else {
if (c != 0)
to[i] = c - 1 + catOffsets[i];
else
to[i] = -1;
}
if (to[i] >= catOffsets[i + 1])
to[i] = (catOffsets[i + 1] - 1);
}
}
}
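  // Illustrative example: with catOffsets = {0, 4, 9} and useAllFactorLevels = true, the
  // categorical part of a row {2.0, NaN, ...} maps to to = {2, 8}: level 2 of the first
  // factor keeps its global index 2, and the NaN in the second factor is sent to the extra
  // "missing" level at catOffsets[2] - 1 = 8.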
public static float[] convertDouble2Float(double[] input) {
int arraySize = input.length;
float[] output = new float[arraySize];
for (int index=0; index<arraySize; index++)
output[index] = (float) input[index];
return output;
}
public static void img2pixels(BufferedImage img, int w, int h, int channels, float[] pixels, int start, float[] mean) throws IOException {
// resize the image
BufferedImage scaledImg = new BufferedImage(w, h, img.getType());
Graphics2D g2d = scaledImg.createGraphics();
g2d.drawImage(img, 0, 0, w, h, null);
g2d.dispose();
int r_idx = start;
int g_idx = r_idx + w * h;
int b_idx = g_idx + w * h;
for (int i = 0; i < h; i++) {
for (int j = 0; j < w; j++) {
Color mycolor = new Color(scaledImg.getRGB(j, i));
int red = mycolor.getRed();
int green = mycolor.getGreen();
int blue = mycolor.getBlue();
if (channels==1) {
pixels[r_idx] = (red+green+blue)/3;
if (mean!=null) {
pixels[r_idx] -= mean[r_idx];
}
} else {
pixels[r_idx] = red;
pixels[g_idx] = green;
pixels[b_idx] = blue;
if (mean!=null) {
pixels[r_idx] -= mean[r_idx-start];
pixels[g_idx] -= mean[g_idx-start];
pixels[b_idx] -= mean[b_idx-start];
}
}
r_idx++;
g_idx++;
b_idx++;
}
}
}
/**
* For internal use only - can be removed at any time!
*
* Creates a version of the MOJO that can be used by a thread in a multi-threaded environment.
   * This is a temporary workaround; a proper fix should be put in place for the MOJOs that are not currently
* thread safe (GAM).
*/
@Deprecated
public GenModel internal_threadSafeInstance() {
return this;
}
}
|
0
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/hex
|
java-sources/ai/h2o/h2o-genmodel/3.46.0.7/hex/genmodel/GenMunger.java
|
package hex.genmodel;
import hex.genmodel.easy.RowData;
import java.io.Serializable;
import java.util.HashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class GenMunger implements Serializable {
public Step[] _steps;
public String[] inTypes() { return _steps[0].types(); }
public String[] inNames() { return _steps[0].names(); }
public String[] outNames() { return _steps[_steps.length-1].outNames(); }
public abstract class Step<T> implements Serializable {
private final String[] _names;
private final String[] _types;
private final String[] _outNames;
protected HashMap<String, String[]> _params;
public abstract RowData transform(RowData row);
public Step(String[] inNames, String[] inTypes, String[] outNames) {_names=inNames; _types=inTypes; _outNames=outNames; _params = new HashMap<>(); }
public String[] outNames() { return _outNames; }
public String[] names() { return _names; }
public String[] types() { return _types; }
public HashMap<String,String[]> params() { return _params; }
}
public RowData fit(RowData row) {
if( row==null ) return null;
for(Step s: _steps)
row = s.transform(row);
return row;
}
public RowData fillDefault(String[] vals) {
RowData row = new RowData();
String[] types = inTypes();
String[] names = inNames();
for(int i=0;i<types.length;++i)
row.put(names[i],vals==null?null:valueOf(types[i],vals[i]));
return row;
}
private static Double parseNum(String n){
    if(n==null || n.isEmpty()) return Double.NaN;
return Double.valueOf(n);
}
private static Object valueOf(String type, String val) {
val = val.replaceAll("^\"|\"$", ""); // strip any bounding quotes
return type.equals("Numeric")
? parseNum(val)
: val;
}
// currents/transforms/GenMunger utilities
public static void scaleInPlace(final double[] means, final double[] mults, double[] in) {
for(int i=0; i<in.length; ++i)
in[i] = (in[i]-means[i])*mults[i];
}
public static double cos(double r, HashMap<String,String[]> parameters) { return Math.cos(r); }
public static double sin(double r, HashMap<String,String[]> parameters) { return Math.sin(r); }
public static double countmatches(String s, HashMap<String,String[]> parameters) {
String[] patterns = parameters.get("pattern");
return countMatches(s, patterns);
}
private static int countMatches(String s, String[] pattern) {
int cnt=0;
for(String pat: pattern) {
Pattern p = Pattern.compile(pat);
Matcher m = p.matcher(s);
while(m.find()) cnt++;
}
return cnt;
}
public static double add(double d, HashMap<String, String[]> parameters) {
String[] leftArg = parameters.get("leftArg");
String[] riteArg = parameters.get("rightArg");
if( riteArg!=null ) return d + parseNum(riteArg[0]);
return parseNum(leftArg[0]) + d;
}
public static double minus(double d, HashMap<String, String[]> parameters) {
String[] leftArg = parameters.get("leftArg");
String[] riteArg = parameters.get("rightArg");
if( riteArg!=null ) return d - parseNum(riteArg[0]);
return parseNum(leftArg[0]) - d;
}
public static double multiply(double d, HashMap<String,String[]> parameters) {
String[] leftArg = parameters.get("leftArg");
String[] riteArg = parameters.get("rightArg");
if( riteArg!=null ) return d * parseNum(riteArg[0]);
return parseNum(leftArg[0]) * d;
}
public static double divide(double d, HashMap<String,String[]> parameters) {
String[] leftArg = parameters.get("leftArg");
String[] rightArg = parameters.get("rightArg");
if( rightArg!=null ) return d / parseNum(rightArg[0]);
return parseNum(leftArg[0]) / d;
}
public static double mod(double d, HashMap<String,String[]> parameters) {
    String[] leftArg = parameters.get("leftArg");
    String[] rightArg = parameters.get("rightArg");
    if( rightArg!=null ) return d % parseNum(rightArg[0]);
    return parseNum(leftArg[0]) % d;
}
public static double pow(double d, HashMap<String, String[]> parameters) {
    String[] leftArg = parameters.get("leftArg");
    String[] rightArg = parameters.get("rightArg");
    if( rightArg!=null ) return Math.pow(d,parseNum(rightArg[0]));
    return Math.pow(parseNum(leftArg[0]),d);
}
private static double and(double l, double r) {
return (l == 0 || r == 0) ? 0 : (Double.isNaN(l) || Double.isNaN(r) ? Double.NaN : 1);
}
public static double and(double d, HashMap<String, String[]> parameters) {
    String[] leftArg = parameters.get("leftArg");
    String[] rightArg = parameters.get("rightArg");
    if( rightArg!=null ) return and(d, parseNum(rightArg[0]));
    return and(parseNum(leftArg[0]), d);
}
private static double or(double l, double r) {
return (l == 1 || r == 1) ? 1 : (Double.isNaN(l) || Double.isNaN(r) ? Double.NaN : 0);
}
public static double or(double d, HashMap<String, String[]> parameters) {
    String[] leftArg = parameters.get("leftArg");
    String[] rightArg = parameters.get("rightArg");
    if( rightArg!=null ) return or(d, parseNum(rightArg[0]));
    return or(parseNum(leftArg[0]), d);
}
private static double intDiv(double l, double r) {
return (((int) r) == 0) ? Double.NaN : (int) l / (int) r;
}
public static double intDiv(double d, HashMap<String, String[]> parameters) {
    String[] leftArg = parameters.get("leftArg");
    String[] rightArg = parameters.get("rightArg");
    if( rightArg!=null ) return intDiv(d, parseNum(rightArg[0]));
    return intDiv(parseNum(leftArg[0]), d);
}
public static String[] strsplit(String s, HashMap<String,String[]> parameters) {
String pattern = parameters.get("split")[0];
return s.split(pattern);
}
public static double asnumeric(String s, HashMap<String, String[]> parameters) {
return parseNum(s);
}
public static String trim(String s, HashMap<String, String[]> parameters) {
return s.trim();
}
public static String replaceall(String s, HashMap<String, String[]> parameters) {
String pattern = parameters.get("pattern")[0];
String replacement = parameters.get("replacement")[0];
boolean ignoreCase = parameters.get("ignore_case")[0].equals("TRUE");
return ignoreCase
? s.replaceAll("(?i)"+Pattern.quote(pattern),replacement)
: s.replaceAll(pattern,replacement);
}
public static String toupper(String s, HashMap<String, String[]> parameters) {
return s.toUpperCase();
}
public static String tolower(String s, HashMap<String, String[]> parameters) {
return s.toLowerCase();
}
public static String cut(double d, HashMap<String, String[]> parameters) {
String[] breaks = parameters.get("breaks");
String[] labels = parameters.get("labels");
boolean lowest = parameters.get("include_lowest")[0].equals("TRUE");
boolean rite = parameters.get("right")[0].equals("TRUE");
if( Double.isNaN(d) || (lowest && d < parseNum(breaks[0]))
|| (!lowest && d <= parseNum(breaks[0]))
|| (rite && d > parseNum(breaks[breaks.length-1]))
|| (!rite && d >= parseNum(breaks[breaks.length-1]))) return "";
else {
      for(int i=1;i<breaks.length;++i) {
        if( rite ) {
          if( d <= parseNum(breaks[i]) ) return labels[i-1];
        } else {
          if( d < parseNum(breaks[i]) ) return labels[i-1];
        }
      }
}
return "";
}
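  // Illustrative example: with breaks = {"0", "10", "20"}, labels = {"low", "high"},
  // right = TRUE and include_lowest = TRUE, cut(5.0, ...) returns "low" (5 <= 10) and
  // cut(15.0, ...) returns "high" (15 <= 20); values outside [0, 20] map to "".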
public static double nchar(String s, HashMap<String, String[]> parameters) {
return s.length();
}
}
|