index
int64
repo_id
string
file_path
string
content
string
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/Impute.java
package water.api; import water.*; import water.exec.ASTTable; import water.exec.ASTddply.Group; import water.exec.Env; import water.fvec.Chunk; import water.fvec.Frame; import water.fvec.Vec; import water.nbhm.NonBlockingHashMap; import water.util.Log; import java.util.Arrays; public class Impute extends Request2 { static final int API_WEAVER = 1; // This file has auto-gen'd doc & json fields static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code. public static final String DOC_GET = "Impute"; @API(help = "Data Frame containing columns to be imputed.", required = true, filter = Default.class, json=true) public Frame source; @API(help="Column which to impute.", required=true, filter=columnVecSelect.class, json=true) public Vec column; class columnVecSelect extends VecClassSelect { columnVecSelect() { super("source"); } } @API(help = "Method of impute: Mean, Median, Most Common", required = true, filter = Default.class, json=true) //, Regression, RandomForest public Method method = Method.mean; class colsFilter1 extends MultiVecSelect { public colsFilter1() { super("source");} } @API(help = "Columns to Select for Grouping", filter=colsFilter1.class) int[] group_by; public enum Method { mean, median, mode // regression, // randomForest } public Impute() {} protected boolean init() throws IllegalArgumentException { // Input handling if (source == null || column == null) throw new IllegalArgumentException("Missing data or input column!"); if (column.isBad()) { Log.info("Column is 100% NAs, nothing to do."); return true; } if (method != Method.mean && method != Method.median && method != Method.mode) // || method != Method.regression || method != Method.randomForest throw new IllegalArgumentException("method must be one of (mean, median, mode)"); // regression, randomForest)"); if ( !(column.isEnum()) && column.naCnt() <= 0) { Log.info("No NAs in the column, nothing to do."); return true; } if (column.isEnum() && 
!Arrays.asList(column._domain).contains("NA") && column.naCnt() <= 0 ) { Log.info("No NAs in the column, nothing to do."); return true; } // if (method == Method.regression && (column.isEnum() || column.isUUID() || column.isTime())) // throw new IllegalArgumentException("Trying to perform regression on non-numeric column! Please select a different column."); if (method == Method.mode && (!column.isEnum())) throw new IllegalArgumentException("Method `mode` only applicable to factor columns."); if (column.isEnum() && method != Method.mode) { Log.warn("Column to impute is a factor column, changing method to mode."); method = Method.mode; } return false; } @Override protected Response serve() { if (init()) return Inspect2.redirect(this, source._key.toString()); final int col_id = source.find(column); final int[] _cols = group_by; final Key mykey = Key.make(); try { if (group_by == null) { // just use "method" using the input "column" double _replace_val = 0; if (method == Method.mean) { _replace_val = column.mean(); } else if (method == Method.median) { QuantilesPage qp = new QuantilesPage(); qp.source_key = source; qp.column = column; qp.invoke(); _replace_val = qp.result; } else if (method == Method.mode) { String dom[] = column.domain(); long[][] levels = new long[1][]; levels[0] = new Vec.CollectDomain(column).doAll(new Frame(column)).domain(); long[][] counts = new ASTTable.Tabularize(levels).doAll(column)._counts; long maxCounts = -1; int mode = -1; for (int i = 0; i < counts[0].length; ++i) { if (counts[0][i] > maxCounts && !dom[i].equals("NA")) { // check for "NA" in domain -- corner case from R maxCounts = counts[0][i]; mode = i; } } _replace_val = mode != -1 ? 
(double) mode : (double) Arrays.asList(dom).indexOf("NA"); // could produce -1 if "NA" not in the domain -- that is we don't have the R corner case if (_replace_val == -1) _replace_val = Double.NaN; // OK to replace, since we're in the elif "mode" block } final double rv = _replace_val; new MRTask2() { @Override public void map(Chunk[] cs) { Chunk c = cs[col_id]; int rows = c.len(); for (int r = 0; r < rows; ++r) { if (c.isNA0(r) || (c._vec.isEnum() && c._vec.domain()[(int) c.at0(r)].equals("NA"))) { if (!Double.isNaN(rv)) c.set0(r, rv); // leave as NA if replace value is NA } } } }.doAll(source); } else { // collect the groups HashMap and the frame from the ddply. // create a vec of group IDs (each row is in some group) // MRTask over the rows water.exec.Exec2.exec(Key.make().toString() + " = anonymous <- function(x) \n{\n " + method + "(x[," + (col_id + 1) + "])\n}").remove_and_unlock(); Env env = water.exec.Exec2.exec(mykey.toString() + " = ddply(" + source._key.toString() + ", " + toAryString(_cols) + ", anonymous)"); final Frame grp_replacement = new Frame(env.peekAry()); env.remove_and_unlock(); final GroupTask grp2val = new GroupTask(grp_replacement.numCols() - 1).doAll(grp_replacement); new MRTask2() { @Override public void map(Chunk[] cs) { Chunk c = cs[col_id]; int rows = cs[0].len(); for (int r = 0; r < rows; ++r) { if (c.isNA0(r) || (c._vec.isEnum() && c._vec.domain()[(int) c.at0(r)].equals("NA"))) { Group g = new Group(_cols.length); g.fill(r, cs, _cols); if (grp2val._grp2val.get(g) == null) continue; double rv = grp2val._grp2val.get(g); c.set0(r, rv); } } } }.doAll(source); } return Inspect2.redirect(this, source._key.toString()); } catch( Throwable t ) { return Response.error(t); } finally { // Delete frames UKV.remove(mykey); } } private String toAryString(int[] c) { String res = "c("; for (int i = 0; i < c.length; ++i) { if (i ==c.length-1) res += String.valueOf(c[i] + 1) + ")"; // + 1 for 0 -> 1 based indexing else res += 
String.valueOf(c[i]+1)+","; // + 1 for 0 -> 1 based indexing } return res; } @Override public boolean toHTML( StringBuilder sb ) { return super.toHTML(sb); } // Create a table: Group -> Impute value private static class GroupTask extends MRTask2<GroupTask> { protected NonBlockingHashMap<Group, Double> _grp2val = new NonBlockingHashMap<Group, Double>(); int[] _cols; GroupTask(int ncols) { _cols = new int[ncols]; for (int i = 0; i < _cols.length; ++i) _cols[i] = i;} @Override public void map(Chunk[] cs) { int rows = cs[0].len(); Chunk vals = cs[cs.length-1]; for (int row = 0; row < rows; ++row) { Group g = new Group(_cols.length); g.fill(row, cs, _cols); double val = vals.at0(row); _grp2val.putIfAbsent(g, val); } } @Override public void reduce( GroupTask gt) { for (Group g : gt._grp2val.keySet()) { Double val = gt._grp2val.get(g); if (g != null && val != null) _grp2val.putIfAbsent(g, val); } } // Custom serialization for NBHM. Much nicer when these are auto-gen'd. // Only sends Groups over the wire, NOT NewChunks with rows. @Override public AutoBuffer write( AutoBuffer ab ) { super.write(ab); if( _grp2val == null ) return ab.put4(0); ab.put4(_grp2val.size()); for( Group g : _grp2val.keySet() ) { ab.put(g); ab.put8d(_grp2val.get(g)); } return ab; } @Override public GroupTask read( AutoBuffer ab ) { super.read(ab); int len = ab.get4(); if( len == 0 ) return this; _grp2val= new NonBlockingHashMap<Group,Double>(); for( int i=0; i<len; i++ ) _grp2val.put(ab.get(Group.class),ab.get8d()); return this; } @Override public void copyOver( Freezable dt ) { GroupTask that = (GroupTask)dt; super.copyOver(that); this._cols = that._cols; this._grp2val = that._grp2val; } } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/Inspect2.java
package water.api; import hex.KMeans2; import hex.ReBalance; import hex.deeplearning.DeepLearning; import hex.drf.DRF; import hex.gbm.GBM; import hex.glm.GLM2; import hex.singlenoderf.SpeeDRF; import org.joda.time.DateTime; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; import water.*; import water.api.Inspect2.ColSummary.ColType; import water.fvec.*; import water.util.UIUtils; import java.text.DecimalFormat; public class Inspect2 extends Request2 { static final int API_WEAVER=1; // This file has auto-gen'd doc & json fields static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code. // This Request supports the HTML 'GET' command, and this is the help text // for GET. static final String DOC_GET = "Inspect a fluid-vec frame"; static final String NA = ""; // not available information @API(help="An existing H2O Frame key.", required=true, filter=Default.class, gridable=false) Frame src_key; @API(help="Offset to begin viewing rows, or -1 to see a structural representation of the data", filter=Default.class, lmin=-1, lmax=Long.MAX_VALUE) long offset; @API(help="Number of data rows.") long numRows; @API(help="Number of data columns.") int numCols; @API(help="byte size in memory.") long byteSize; // An internal JSON-output-only class static class ColSummary extends Iced { public static enum ColType { Enum, Int, Real, Time, UUID }; static final int API_WEAVER=1; // This file has auto-gen'd doc & json fields static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code. public ColSummary( String name, Vec vec ) { this.name = name; this.type = vec.isEnum() ? ColType.Enum : vec.isUUID() ? ColType.UUID : (vec.isInt() ? (vec.isTime() ? ColType.Time : ColType.Int) : ColType.Real); boolean numeric = !vec.isEnum() && !vec.isUUID(); this.min = vec.isEnum() ? Double.NaN : vec.min(); this.max = vec.isEnum() ? Double.NaN : vec.max(); this.mean = vec.isEnum() ? 
Double.NaN : vec.mean(); this.sdev = vec.isEnum() ? Double.NaN : vec.sigma(); this.naCnt= vec.naCnt(); this.cardinality = vec.cardinality(); } @API(help="Label." ) final String name; @API(help="type." ) final ColType type; @API(help="min." ) final double min; @API(help="max." ) final double max; @API(help="mean." ) final double mean; @API(help="std deviation." ) final double sdev; @API(help="Missing elements.") final long naCnt; @API(help="Cardinality.") final long cardinality; } @API(help="Array of Column Summaries.") ColSummary cols[]; // Called from some other page, to redirect that other page to this page. public static Response redirect(Request req, String src_key) { return Response.redirect(req, "/2/Inspect2", "src_key", src_key ); } // Just validate the frame, and fill in the summary bits @Override protected Response serve() { if( src_key == null ) return RequestServer._http404.serve(); numRows = src_key.numRows(); numCols = src_key.numCols(); Futures fs = new Futures(); for( int i=0; i<numCols; i++ ) src_key.vecs()[i].rollupStats(fs); fs.blockForPending(); byteSize = src_key.byteSize(); cols = new ColSummary[numCols]; for( int i=0; i<cols.length; i++ ) cols[i] = new ColSummary(src_key._names[i],src_key.vecs()[i]); return Response.done(this); } public static String jsonLink(Key key){return "2/Inspect2.json?src_key=" + key;} private static final DecimalFormat mean_dformat = new DecimalFormat("###.###"); @Override public boolean toHTML( StringBuilder sb ) { Key skey = Key.make(input("src_key")); // Missing/NA count long naCnt = 0; // Enum column is in dataset boolean enumCol = false; for( int i=0; i<cols.length; i++ ) { naCnt += cols[i].naCnt; enumCol |= cols[i].type == ColType.Enum; } Vec svecs[] = src_key.vecs(); DocGen.HTML.title(sb,skey.toString()); DocGen.HTML.section(sb,""+String.format("%,d",numCols)+" columns, "+String.format("%,d",numRows)+" rows, "+ PrettyPrint.bytes(byteSize)+" bytes (compressed), "+ (naCnt== 0 ? 
"no":PrettyPrint.bytes(naCnt))+" missing elements"); sb.append("<div class='alert'>" + //"<br/> Expand factors using " + OneHot.link(skey, "One Hot Expansion") + //"View " + SummaryPage2.link(key, "Summary") + "<br/>Build models using " + SpeeDRF.link(skey, "Random Forest") +", "+ DRF.link(skey, "BigData Random Forest") +", "+ GBM.link(skey, "Distributed GBM") +", "+ GLM2.link(skey, "Generalized Linear Modeling") +", "+ DeepLearning.link(skey, "Deep Learning") +", "+ hex.LR2.link(skey, "Linear Regression") + ", " + KMeans2.link(skey, "K-Means") + "<br>"+ SummaryPage2.link(skey,"Summary")+", "+ DownloadDataset.link(skey, "Download as CSV")+", "+ ExportFiles.link(skey, "Export to file")+", "+ UIUtils.qlink(FrameSplitPage.class, skey, "Split frame") + ", " + UIUtils.qlink(NFoldFrameExtractPage.class, skey, "N-fold extract") + ", " + UIUtils.qlink(ReBalance.class, skey, "ReBalance frame (load balancing)") + "</div>"); String _scrollto = String.valueOf(offset - 1); sb.append( " <script>$(document).ready(function(){ " + " $('html, body').animate({ scrollTop: $('#row_"+_scrollto+"').offset().top" + "}, 2000);" + "return false;" + "});</script>"); sb.append( "<form class='well form-inline' action='Inspect2.html' id='inspect'>" + " <input type='hidden' name='src_key' value="+skey.toString()+">" + " <input type='text' class='input-small span5' placeholder='filter' " + " name='offset' id='offset' value='"+offset+"' maxlength='512'>" + " <button type='submit' class='btn btn-primary'>Jump to row!</button>" + "</form>"); // Start of where the pagination table goes. For now, just the info button. 
sb.append(pagination(src_key.numRows(), skey)); DocGen.HTML.arrayHead(sb); // Column labels // " <button type='submit' class='btn btn-primary'>Jump to row!</button>" + sb.append("<tr class='warning'>"); sb.append("<td>").append("Row").append("</td>"); for( int i=0; i<cols.length; i++ ) sb.append("<td><b>").append(cols[i].name).append("</b></td>"); sb.append("</tr>"); sb.append("<tr class='warning'>"); sb.append("<td>").append("Change Type").append("</td>"); for( int i=0; i<cols.length; i++ ) { if(cols[i].type==ColType.Int) { String btn = "<span class='btn_custom'>\n"; btn += "<a href='ToEnum2.html?src_key=" + src_key._key.toString() + "&column_index=" + (i+1) + "'>" + "<button type='submit' class='btn btn-custom'>As Factor</button>\n"; btn += "</span>\n"; sb.append("<td><b>").append(btn).append("</b></td>"); continue; } if(src_key.vecs()[i] instanceof TransfVec) { String btn2 = "<span class='btn_custom'>\n"; btn2 += "<a href='ToInt2.html?src_key=" + src_key._key.toString() + "&column_index=" + (i+1) + "'>" + "<button type='submit' class='btn btn-custom'>As Integer</button>\n"; btn2 += "</span>\n"; sb.append("<td><b>").append(btn2).append("</b></td>"); continue; } if(cols[i].type != ColType.Int) { sb.append("<td><b>").append("").append("</b></td>"); continue; } } sb.append("</tr>"); sb.append("<tr class='warning'>"); sb.append("<td>").append("Type").append("</td>"); for( int i=0; i<cols.length; i++ ) sb.append("<td><b>").append(cols[i].type).append("</b></td>"); sb.append("</tr>"); sb.append("<tr class='warning'>"); sb.append("<td>").append("Min").append("</td>"); for( int i=0; i<cols.length; i++ ) sb.append("<td>").append(cols[i].type==ColType.Enum ? NA : x1(svecs[i],-1,cols[i].min)).append("</td>"); sb.append("</tr>"); sb.append("<tr class='warning'>"); sb.append("<td>").append("Max").append("</td>"); for( int i=0; i<cols.length; i++ ) sb.append("<td>").append(cols[i].type==ColType.Enum ? 
NA : x1(svecs[i],-1,cols[i].max)).append("</td>"); sb.append("</tr>"); sb.append("<tr class='warning'>"); sb.append("<td>").append("Mean").append("</td>"); for( int i=0; i<cols.length; i++ ) sb.append("<td>").append((cols[i].type == ColType.Enum) || (cols[i].type == ColType.UUID) ? NA : mean_dformat.format(cols[i].mean)).append("</td>"); sb.append("</tr>"); sb.append("<tr class='warning'>"); sb.append("<td>").append("Std Dev").append("</td>"); for( int i=0; i<cols.length; i++ ) sb.append("<td>").append((cols[i].type == ColType.Enum) || (cols[i].type == ColType.UUID) ? NA : mean_dformat.format(cols[i].sdev)).append("</td>"); sb.append("</tr>"); // Cardinality row is shown only if dataset contains enum-column if (enumCol) { sb.append("<tr class='warning'>"); sb.append("<td>").append("Cardinality").append("</td>"); for( int i=0; i<cols.length; i++ ) sb.append("<td>").append(cols[i].type==ColType.Enum ? String.format("%d",cols[i].cardinality) : NA).append("</td>"); sb.append("</tr>"); } // Missing / NA row is optional; skip it if the entire dataset is clean if( naCnt > 0 ) { sb.append("<tr class='warning'>"); sb.append("<td>").append("Missing").append("</td>"); for( int i=0; i<cols.length; i++ ) sb.append("<td>").append(cols[i].naCnt > 0 ? 
Long.toString(cols[i].naCnt) : NA).append("</td>"); sb.append("</tr>"); } if( offset == -1 ) { // Info display sb.append("<tr class='warning'>"); // An extra row holding vec's compressed bytesize sb.append("<td>").append("Size").append("</td>"); for( int i=0; i<cols.length; i++ ) sb.append("<td>").append(PrettyPrint.bytes(svecs[i].byteSize())).append("</td>"); sb.append("</tr>"); // All Vecs within a frame are compatible, so just read the // home-node/data-placement and start-row from 1st Vec Vec c0 = src_key.anyVec(); int N = c0.nChunks(); for( int j=0; j<N; j++ ) { // All the chunks sb.append("<tr>"); // Row header // 1st column: report data home node (data placement), and row start sb.append("<td>").append(c0.chunkKey(j).home_node()) .append(", ").append(c0.chunk2StartElem(j)).append("</td>"); for( int i=0; i<cols.length; i++ ) { // Report chunk-type (compression scheme) String clazz = svecs[i].chunkForChunkIdx(j).getClass().getSimpleName(); String trim = clazz.replaceAll("Chunk",""); sb.append("<td>").append(trim).append("</td>"); } sb.append("</tr>"); } } else { // Row/data display // First N rows int N = (int)Math.min(100,numRows-offset); for( int j=0; j<N; j++ ) {// N rows sb.append("<tr id='row_"+String.valueOf(offset+j)+"'>"); // Row header sb.append("<td>").append(offset+j).append("</td>"); for( int i=0; i<cols.length; i++ ) // Columns w/in row sb.append("<td>").append(x0(svecs[i],offset+j)).append("</td>"); sb.append("</tr>"); } } DocGen.HTML.arrayTail(sb); return true; } // --- // Return a well-formatted string for this kind of Vec public static String x0( Vec v, long row ) { if( !v.isUUID() ) return x1(v,row,v.at(row)); // UUID handling if( v.isNA(row) ) return x1(v,row,Double.NaN); return "<b style=\"font-family:monospace;\">"+PrettyPrint.UUID(v.at16l(row),v.at16h(row))+"</b>"; } // Format a row, OR the min/max public static String x1( Vec v, long row, double d ) { if( (row >= 0 && v.isNA(row)) || Double.isNaN(d) ) return "-"; // Display of missing 
elements if( v.isEnum() ) return row >= 0 ? v.domain(v.at8(row)) : Long.toString((long)d); if( v.isTime() ) { String tpat = v.timeParse(); DateTime dt = new DateTime(row >= 0 ? v.at8(row) : (long)d); DateTimeFormatter fmt = DateTimeFormat.forPattern(tpat); String str = fmt.print(dt); return str; } if( v.isInt() ) return Long.toString(row >= 0 ? v.at8(row) : (long)d); Chunk c = v.chunkForChunkIdx(0); Class Cc = c.getClass(); if( Cc == C1SChunk.class ) return x2(d,((C1SChunk)c)._scale); if( Cc == C2SChunk.class ) return x2(d,((C2SChunk)c)._scale); return Double.toString(d); } public static String x2( double d, double scale ) { String s = Double.toString(d); // Double math roundoff error means sometimes we get very long trailing // strings of junk 0's with 1 digit at the end... when we *know* the data // has only "scale" digits. Chop back to actual digits int ex = (int)Math.log10(scale); int x = s.indexOf('.'); int y = x+1+(-ex); if( x != -1 && y < s.length() ) s = s.substring(0,x+1+(-ex)); while( s.charAt(s.length()-1)=='0' ) s = s.substring(0,s.length()-1); return s; } public String link(String txt,Key k, long offset, long max){ if(offset != this.offset && 0 <= offset && offset <= max)return "<a href='/2/Inspect2.html?src_key=" + k.toString() + "&offset=" + offset + "'>" + txt + "</a>"; return "<span>" + txt + "</span>"; } private String infoLink(Key k){ return "<a href='/2/Inspect2.html?src_key=" + k.toString() + "&offset=-1'>info</a>"; } public static String link(Key k) { return link(k.toString(), k.toString()); } public static String link(String txt,Key k) { return link(txt, k.toString()); } public static String link(String txt,String key) { return "<a href='/2/Inspect2.html?src_key=" + key + "&offset=0'>" + txt + "</a>"; } private static int viewsz = 100; protected String pagination(long max, Key skey) { final long offset = this.offset; StringBuilder sb = new StringBuilder(); sb.append("<div style='text-align:center;'>"); sb.append("<span 
class='pagination'><ul>"); sb.append("<li>" + infoLink(skey) + "</li>"); long lastOffset = (max / viewsz) * viewsz; long lastIdx = (max / viewsz); long currentIdx = offset / viewsz; long startIdx = Math.max(currentIdx-5,0); long endIdx = Math.min(startIdx + 11, lastIdx); if (offset == -1) currentIdx = -1; sb.append("<li>" + link("|&lt;",skey,0,lastOffset) + "</li>"); sb.append("<li>" + link("&lt;",skey,offset-viewsz,lastOffset) + "</li>"); for (long i = startIdx; i <= endIdx; ++i) sb.append("<li>" + link(String.valueOf(i),skey,i*viewsz,lastOffset) + "</li>"); sb.append("<li>" + link("&gt;",skey,offset+viewsz,lastOffset) + "</li>"); sb.append("<li>" + link("&gt;|",skey,lastOffset,lastOffset) + "</li>"); sb.append("</ul></span>"); sb.append("</div>"); return sb.toString(); } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/Inspector.java
package water.api; import java.util.HashMap; import java.util.Map; import hex.GridSearch; import hex.glm.GLM2; import water.*; import water.api.RequestBuilders.Response; import water.fvec.Frame; import water.util.RString; import water.util.UIUtils; /** * This is just a simple Spring-like name-driven request redirector. * * <p>The page never returns actual content, but provides a * redirect to proper page.</p> * * <p> * Note: The best redirector would be based on a simple pattern: * incoming class name is suffixed by "View" which composes a redirect link.</p> * */ public class Inspector extends Request2 { static final int API_WEAVER=1; // This file has auto-gen'd doc & json fields static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code. private static Map<Class, String[]> REDIRECTS; static { REDIRECTS = new HashMap<Class, String[]>(); // All attempts to view frame redirect to Inspect frame REDIRECTS.put(Frame.class, sa("/2/Inspect2", "src_key")); // All attempts to view a model redirect to <model_name>View REDIRECTS.put(Model.class, sa("/2/%typename{}View", "_modelKey")); REDIRECTS.put(GLM2.GLMGrid.class, sa("/2/GLMGridView", "grid_key")); REDIRECTS.put(GridSearch.class, sa("/2/%typename{}Progress", "destination_key")); } @API(help="H2O key to inspect.", filter=Default.class, json=true, required=true, gridable=false) Key src_key; @Override protected Response serve() { Value v = DKV.get(src_key); if (v==null) throw new IllegalArgumentException("Key " + src_key + " does not exist!"); String typename = v.className(); try { Class klazz = Class.forName(typename); if (REDIRECTS.containsKey(klazz)) { String[] r = REDIRECTS.get(klazz); return redirect(klazz.getSimpleName(), r[0], r[1]); } else { // Find first matching class for (Class k : REDIRECTS.keySet()) { if (k.isAssignableFrom(klazz)) { String[] r = REDIRECTS.get(k); return redirect(klazz.getSimpleName(), r[0], r[1]); } } } } catch (ClassNotFoundException e) { // This is critical error since it 
should not happen return Response.error(e); } throw new IllegalArgumentException("Unknown key type! Key = " + src_key + " and type = " + typename); } public static String link(String txt, String key) { return UIUtils.link(Inspector.class, "src_key", key, txt); } private Response redirect(String typename, String urlTemplate, String keyParamName) { RString r = new RString(urlTemplate); r.replace("typename", typename); return Response.redirect(this, r.toString(), keyParamName, src_key.toString()); } private static String[] sa(String ...s) { return s; } //Called from some other page, to redirect that other page to this page. public static Response redirect(Request req, Key src_key) { return Response.redirect(req, "/2/Inspector", "src_key", src_key.toString()); } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/JProfile.java
package water.api; import water.Func; import water.H2O; import water.Iced; import water.Request2; import water.fvec.Frame; import water.fvec.Vec; import water.util.Log; import water.util.ProfileCollectorTask; import java.text.DateFormat; import java.util.Collections; import java.util.Date; import java.util.Map; import java.util.TreeMap; public class JProfile extends Func { static final int API_WEAVER=1; // This file has auto-gen'd doc & json fields static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code. // This Request supports the HTML 'GET' command, and this is the help text // for GET. static final String DOC_GET = "Displays profile dumps from all nodes."; @API(help="Stack trace depth", required=true, filter=Default.class, json=true) public int depth = 5; @API(help="This node's name") public String node_name; @API(help="The cloud's name") public String cloud_name; @API(help="Current time") public String time; public static class ProfileSummary extends Iced { static final int API_WEAVER=1; // This file has auto-gen'd doc & json fields static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code. 
public ProfileSummary( String name, ProfileCollectorTask.NodeProfile profile) { this.name = name; this.profile= profile; } @API(help="Node name") final String name; @API(help="Profile") final ProfileCollectorTask.NodeProfile profile; } @API(help="Array of Profiles, one per Node in the Cluster") public ProfileSummary nodes[]; @Override public void execImpl() { ProfileCollectorTask.NodeProfile profiles[] = new ProfileCollectorTask(depth).invokeOnAllNodes()._result; nodes = new ProfileSummary[H2O.CLOUD.size()]; for( int i=0; i<nodes.length; i++ ) nodes[i] = new ProfileSummary(H2O.CLOUD._memary[i].toString(),profiles[i]); node_name = H2O.SELF.toString(); cloud_name = H2O.NAME; time = DateFormat.getInstance().format(new Date()); for( int i=0; i<nodes.length; i++ ) { Log.info(nodes[i].name); for (int j = 0; j < nodes[i].profile.counts.length; ++j) { Log.info(nodes[i].profile.counts[j]); Log.info(nodes[i].profile.stacktraces[j]); } } } @Override public boolean toHTML( StringBuilder sb ) { // build tab list sb.append("<div class='tabbable tabs-left'>\n"); sb.append(" <ul class='nav nav-tabs' id='nodesTab'>\n"); for( int i = 0; i < nodes.length; ++i ) { sb.append("<li class='").append(i == 0 ? "active" : "").append("'>\n"); sb.append("<a href='#tab").append(i).append("' data-toggle='tab'>"); sb.append(nodes[i].name).append("</a>\n"); sb.append("</li>"); } sb.append("</ul>\n"); // build the tab contents sb.append(" <div class='tab-content' id='nodesTabContent'>\n"); for( int i = 0; i < nodes.length; ++i ) { sb.append("<div class='tab-pane").append(i == 0 ? 
" active": "").append("' "); sb.append("id='tab").append(i).append("'>\n"); for (int j=0; j<nodes[i].profile.counts.length; ++j) { sb.append("<pre>").append(nodes[i].profile.counts[j]).append("\n").append(nodes[i].profile.stacktraces[j]).append("</pre>"); } sb.append("</div>"); } sb.append(" </div>"); sb.append("</div>"); sb.append("<script type='text/javascript'>" + "$(document).ready(function() {" + " $('#nodesTab a').click(function(e) {" + " e.preventDefault(); $(this).tab('show');" + " });" + "});" + "</script>"); return true; } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/JSONOnlyRequest.java
package water.api; import dontweave.gson.JsonObject; import java.util.Properties; import water.NanoHTTPD; /** JSON only request. Throws in any other access mode. * * @author peta */ public abstract class JSONOnlyRequest extends Request { public NanoHTTPD.Response serve(NanoHTTPD server, Properties args, RequestType type) { if (type == RequestType.json) { return super.serve(server,args,type); } else { JsonObject resp = new JsonObject(); resp.addProperty(ERROR,"This request is only provided for browser connections"); return wrap(server, resp); } } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/JStack.java
package water.api; import java.text.DateFormat; import java.util.Date; import water.H2O; import water.Iced; import water.util.Log; import water.util.JStackCollectorTask; public class JStack extends Request { static final int API_WEAVER=1; // This file has auto-gen'd doc & json fields static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code. // This Request supports the HTML 'GET' command, and this is the help text // for GET. static final String DOC_GET = "Displays stack dumps from all nodes."; @API(help="This node's name") public String node_name; @API(help="The cloud's name") public String cloud_name; @API(help="Current time") public String time; public static class StackSummary extends Iced { static final int API_WEAVER=1; // This file has auto-gen'd doc & json fields static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code. public StackSummary( String name, String traces ) { this.name = name; this.traces= traces; } @API(help="Node name") final String name; @API(help="Stack traces") final String traces; } @API(help="Array of Stack Traces, one per Node in the Cluster") public StackSummary nodes[]; @Override public Response serve() { String traces[] = new JStackCollectorTask().invokeOnAllNodes()._result; nodes = new StackSummary[H2O.CLOUD.size()]; for( int i=0; i<nodes.length; i++ ) nodes[i] = new StackSummary(H2O.CLOUD._memary[i].toString(),traces[i]); node_name = H2O.SELF.toString(); cloud_name = H2O.NAME; time = DateFormat.getInstance().format(new Date()); for( int i=0; i<nodes.length; i++ ) Log.debug(Log.Tag.Sys.WATER,nodes[i].name,nodes[i].traces); return Response.done(this); } @Override public boolean toHTML( StringBuilder sb ) { // build tab list sb.append("<div class='tabbable tabs-left'>\n"); sb.append(" <ul class='nav nav-tabs' id='nodesTab'>\n"); for( int i = 0; i < nodes.length; ++i ) { sb.append("<li class='").append(i == 0 ? 
"active" : "").append("'>\n"); sb.append("<a href='#tab").append(i).append("' data-toggle='tab'>"); sb.append(nodes[i].name).append("</a>\n"); sb.append("</li>"); } sb.append("</ul>\n"); // build the tab contents sb.append(" <div class='tab-content' id='nodesTabContent'>\n"); for( int i = 0; i < nodes.length; ++i ) { sb.append("<div class='tab-pane").append(i == 0 ? " active": "").append("' "); sb.append("id='tab").append(i).append("'>\n"); sb.append("<pre>").append(nodes[i].traces).append("</pre>"); sb.append("</div>"); } sb.append(" </div>"); sb.append("</div>"); sb.append("<script type='text/javascript'>" + "$(document).ready(function() {" + " $('#nodesTab a').click(function(e) {" + " e.preventDefault(); $(this).tab('show');" + " });" + "});" + "</script>"); return true; } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/Jobs.java
package water.api;

import dontweave.gson.JsonArray;
import dontweave.gson.JsonElement;
import dontweave.gson.JsonObject;
import water.DKV;
import water.Job;
import water.Job.JobState;
import water.Key;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.text.ParseException;
import java.util.Date;

/**
 * Lists all known jobs (newest first) with per-job progress, result state and
 * cancel links; installs custom HTML builders for each column.
 */
public class Jobs extends Request {
  Jobs() {
  }

  /** Builds a redirect Response to this page, carrying the destination key. */
  public static Response redirect(JsonObject resp, Key dest) {
    JsonObject redir = new JsonObject();
    redir.addProperty(KEY, dest.toString());
    return Response.redirect(resp, Jobs.class, redir);
  }

  @Override protected Response serve() {
    JsonObject result = new JsonObject();
    JsonArray array = new JsonArray();
    Job[] jobs = Job.all();
    // Iterate backwards so the newest job appears first.
    for( int i = jobs.length - 1; i >= 0; i-- ) {
      Job job = jobs[i];
      JsonObject json = new JsonObject();
      json.addProperty(KEY, job.self().toString());
      json.addProperty(DESCRIPTION, job.description);
      json.addProperty(DEST_KEY, job.dest() != null ? job.dest().toString() : "");
      json.addProperty(START_TIME, RequestBuilders.ISO8601.get().format(new Date(job.start_time)));
      long end = job.end_time; // 0 means the job has not finished yet
      JsonObject jobResult = new JsonObject();
      boolean cancelled = job.state == JobState.CANCELLED || job.state == JobState.FAILED;
      if( cancelled ) {
        if( job.exception != null ) {
          jobResult.addProperty("exception", "1");
          jobResult.addProperty("val", job.exception);
        } else {
          jobResult.addProperty("val", "CANCELLED");
        }
      } else if( job.state == JobState.DONE )
        jobResult.addProperty("val", "OK");
      json.addProperty(END_TIME, end == 0 ? "" : RequestBuilders.ISO8601.get().format(new Date(end)));
      // PROGRESS sentinels: -2 = cancelled/failed, -1 = finished, otherwise the
      // running job's fraction. BUG FIX: the original called addProperty(PROGRESS, ...)
      // twice; the first value was immediately overwritten, so only the effective
      // (second) expression is kept.
      json.addProperty(PROGRESS, end == 0 ? (cancelled ? -2 : job.progress()) : (cancelled ? -2 : -1));
      json.addProperty(CANCELLED, cancelled);
      json.add("result", jobResult);
      array.add(json);
    }
    result.add(JOBS, array);
    Response r = Response.done(result);
    // Hide the array caption.
    r.setBuilder(JOBS, new ArrayBuilder() {
      @Override public String caption(JsonArray array, String name) {
        return "";
      }
    });
    // Render the job key column as a cancel button (disabled once not running).
    r.setBuilder(JOBS + "." + KEY, new ArrayRowElementBuilder() {
      @Override public String elementToString(JsonElement elm, String contextName) {
        String html;
        if( !Job.isRunning(Key.make(elm.getAsString())) )
          html = "<button disabled class='btn btn-mini'>X</button>";
        else {
          String keyParam = KEY + "=" + elm.getAsString();
          html = "<a href='/Cancel.html?" + keyParam + "'><button class='btn btn-danger btn-mini'>X</button></a>";
        }
        return html;
      }
    });
    // Link the destination key to its inspect page when it exists in the DKV.
    r.setBuilder(JOBS + "." + DEST_KEY, new ArrayRowElementBuilder() {
      @Override public String elementToString(JsonElement elm, String contextName) {
        String str = elm.getAsString();
        String key = null;
        try {
          key = URLEncoder.encode(str,"UTF-8");
        } catch( UnsupportedEncodingException e ) {
          key = str; // fall back to the raw key name; UTF-8 is always available in practice
        }
        return ("".equals(key) || DKV.get(Key.make(str)) == null) ? key : Inspector.link(str, str);
      }
    });
    // Render start/end times in the browser's locale.
    r.setBuilder(JOBS + "." + START_TIME, new ArrayRowElementBuilder() {
      @Override public String elementToString(JsonElement elm, String contextName) {
        return date(elm.toString());
      }
    });
    r.setBuilder(JOBS + "." + END_TIME, new ArrayRowElementBuilder() {
      @Override public String elementToString(JsonElement elm, String contextName) {
        return date(elm.toString());
      }
    });
    r.setBuilder(JOBS + "." + PROGRESS, new ArrayRowElementBuilder() {
      @Override public String elementToString(JsonElement elm, String contextName) {
        return progress(Float.parseFloat(elm.getAsString()));
      }
    });
    // Result column: OK / CANCELLED / expandable FAILED with the exception text.
    r.setBuilder(JOBS + "." + "result", new ElementBuilder() {
      @Override public String objectToString(JsonObject obj, String contextName) {
        if( obj.has("exception") ) {
          String rid = Key.make().toString(); // unique DOM id for the show/hide div
          // Strip quotes so the text can be embedded in the inline JS string.
          String ex = obj.get("val").getAsString().replace("'", "");
          String[] lines = ex.split("\n");
          StringBuilder sb = new StringBuilder(lines[0]);
          for( int i = 1; i < lines.length; ++i )
            sb.append("\\n" + lines[i]);
          ex = sb.toString();
          String res = "\n<a onClick=\"" + "var showhide=document.getElementById('" + rid + "');"
              + "if(showhide.innerHTML == '') showhide.innerHTML = '<pre>" + ex + "</pre>';"
              + "else showhide.innerHTML = '';" + "\">FAILED</a>\n<div id='" + rid + "'></div>\n";
          return res;
        } else if( obj.has("val") ) {
          return obj.get("val").getAsString();
        }
        return "";
      }
      @Override public String build(String elementContents, String elementName) {
        return "<td>" + elementContents + "</td>";
      }
    });
    return r;
  }

  /**
   * Converts a (possibly quoted) ISO-8601 timestamp into a script that prints
   * it in the browser's local time; empty input yields an empty string.
   */
  private static String date(String utc) {
    if( utc == null || utc.length() == 0 )
      return "";
    utc = utc.replaceAll("^\"|\"$",""); // JsonElement.toString() wraps strings in quotes
    if( utc.length() == 0 )
      return "";
    long ms;
    try {
      ms = RequestBuilders.ISO8601.get().parse(utc).getTime();
    } catch( ParseException e ) {
      throw new RuntimeException(e);
    }
    return "<script>document.write(new Date(" + ms + ").toLocaleTimeString())</script>";
  }

  /**
   * Renders a Bootstrap progress bar. Sentinels: -1 (pct -100) = done (green),
   * -2 (pct -200) = cancelled/failed (yellow); otherwise an animated striped bar.
   */
  private static String progress(float value) {
    int pct = (int) (value * 100);
    // BUG FIX: "progress-stripped" is not a Bootstrap class; the striped bar is
    // "progress-striped" (Bootstrap 2).
    String type = "progress-striped active";
    if (pct==-100) { // task is done
      pct = 100;
      type = "progress-success";
    } else if (pct==-200) { // task was cancelled or failed
      pct = 100;
      type = "progress-warning";
    }
    // BUG FIX: "padding-bottom:0xp" was a typo for "0px".
    // @formatter:off
    return ""
        + "<div style='margin-bottom:0px;padding-bottom:0px;margin-top:8px;height:5px;width:180px' class='progress "+type+"'>" //
        + "<div class='bar' style='width:" + pct + "%;'>" //
        + "</div>" //
        + "</div>";
    // @formatter:on
  }

  @Override public RequestServer.API_VERSION[] supportedVersions() {
    return SUPPORTS_V1_V2;
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/KillMinus3.java
package water.api; /** * Created by tomk on 5/19/14. */ public class KillMinus3 extends Request { private static String getProcessId() throws Exception { // Note: may fail in some JVM implementations // therefore fallback has to be provided // something like '<pid>@<hostname>', at least in SUN / Oracle JVMs final String jvmName = java.lang.management.ManagementFactory.getRuntimeMXBean().getName(); final int index = jvmName.indexOf('@'); if (index < 1) { // part before '@' empty (index = 0) / '@' not found (index = -1) throw new Exception ("Can't get process Id"); } return Long.toString(Long.parseLong(jvmName.substring(0, index))); } @Override public Response serve(){ try { String cmd = "/bin/kill -3 " + getProcessId(); java.lang.Runtime.getRuntime().exec(cmd); } catch (Exception xe) {} return Response.doneEmpty(); } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/Levels2.java
package water.api; import water.*; import water.fvec.Frame; import water.fvec.Vec; import water.util.RString; public class Levels2 extends Request2 { static final int API_WEAVER=1; // This file has auto-gen'd doc & json fields static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code. // This Request supports the HTML 'GET' command, and this is the help text // for GET. static final String DOC_GET = "Returns the factor levels of each column in a frame"; @API(help="An existing H2O Frame key.", required=true, filter=Default.class) Frame source; class colsFilter1 extends MultiVecSelect { public colsFilter1() { super("source");} } @API(help = "Select columns", filter=colsFilter1.class) int[] cols; @API(help = "Maximum columns to show summaries of", filter = Default.class, lmin = 1) int max_ncols = 1000; @API(help = "Factor levels of each column") String[][] levels; public static String link(Key k, String content) { RString rs = new RString("<a href='Levels2.query?source=%$key'>"+content+"</a>"); rs.replace("key", k.toString()); return rs.toString(); } @Override protected Response serve() { // select all columns by default if( cols == null ) { cols = new int[Math.min(source.vecs().length,max_ncols)]; for(int i = 0; i < cols.length; i++) cols[i] = i; } Vec[] vecs = new Vec[cols.length]; String[] names = new String[cols.length]; for (int i = 0; i < cols.length; i++) { vecs[i] = source.vecs()[cols[i]]; names[i] = source._names[cols[i]]; } levels = new String[cols.length][]; for(int i = 0; i < cols.length; i++) levels[i] = vecs[i].domain() == null ? null : vecs[i].domain().clone(); return Response.done(this); } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/LoadModel.java
package water.api; import static water.util.FSUtils.isHdfs; import static water.util.FSUtils.isS3N; import java.io.File; import java.io.IOException; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import water.Model; import water.Request2; import water.persist.PersistHdfs; import water.serial.Model2FileBinarySerializer; import water.serial.Model2HDFSBinarySerializer; public class LoadModel extends Request2 { static final int API_WEAVER = 1; static public DocGen.FieldDoc[] DOC_FIELDS; @API(help = "Path to a file with saved model.", required = true, filter = Default.class, gridable = false) String path; @API(help = "Loaded model") Model model; @Override protected Response serve() { if (isHdfs(path) || isS3N(path)) loadFromHdfs(); else loadFromLocalFS(); return Inspector.redirect(this, model._key); } private void loadFromLocalFS() { File f = new File(path); if (!f.exists()) throw new IllegalArgumentException("File " +path+" does not exist!"); try { model =new Model2FileBinarySerializer().load(f); } catch( IOException e ) { throw new IllegalArgumentException("Cannot load file " + path, e); } } private void loadFromHdfs() { Path f = new Path(path); try { FileSystem fs = FileSystem.get(f.toUri(), PersistHdfs.CONF); model = new Model2HDFSBinarySerializer(fs, false).load(f); } catch( IOException e ) { throw new IllegalArgumentException("Cannot load file " + path, e); } } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/LogAndEcho.java
package water.api; import dontweave.gson.JsonObject; import water.util.Log; public class LogAndEcho extends Request { @Override public RequestServer.API_VERSION[] supportedVersions() { return SUPPORTS_V1_V2; } protected final Str _message = new Str("message", ""); @Override protected Response serve() { String s = _message.value(); Log.info(s); JsonObject response = new JsonObject(); response.addProperty("message", s); return Response.done(response); } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/LogView.java
package water.api;

import java.io.*;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Properties;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

import water.*;
import water.util.*;

import dontweave.gson.JsonObject;

/**
 * Shows this node's log file in the browser; the nested LogDownload request
 * streams back a zip archive of the logs from every node in the cloud.
 */
public class LogView extends Request {
  @Override protected Response serve() {
    // Path of this node's current log file.
    String s = water.util.Log.getLogPathFileName();
    JsonObject result = new JsonObject();
    File f = new File (s);
    String contents = Utils.readFile(f);
    if (contents == null) {
      // The log file may not exist yet right after startup.
      contents = "Not yet initialized, please refresh...";
    }
    result.addProperty("log", "<pre>" + contents + "</pre>");
    Response response = Response.done(result);
    response.addHeader("<a class='btn btn-primary' href='LogDownload.html'>Download all logs</a>");
    return response;
  }

  // Do not log requests for the log page itself.
  @Override protected boolean log() {
    return false;
  }

  /** Collects every node's logs and serves them as a single zip download. */
  static class LogDownload extends Request {
    @Override public water.NanoHTTPD.Response serve(NanoHTTPD server, Properties args, RequestType type) {
      Log.info("\nCollecting logs.");
      // collect nodes' logs via a distributed task; one byte[] per node
      LogCollectorTask collector = new LogCollectorTask();
      collector.invokeOnAllNodes();
      // FIXME put here zip for each file.
      String outputFileStem = getOutputLogStem();
      byte[] result = null;
      try {
        result = zipLogs(collector._result, outputFileStem);
      } catch (IOException e) {
        // put the exception into output log (the download then contains the error text)
        result = e.toString().getBytes();
      }
      // Stream the archive back as a binary attachment.
      NanoHTTPD.Response res = server.new Response(NanoHTTPD.HTTP_OK,NanoHTTPD.MIME_DEFAULT_BINARY, new ByteArrayInputStream(result));
      res.addHeader("Content-Length", Long.toString(result.length));
      res.addHeader("Content-Disposition", "attachment; filename="+outputFileStem + ".zip");
      return res;
    }

    // The raw serve(server, args, type) overload above handles the request;
    // this standard entry point must never be reached.
    @Override protected Response serve() {
      throw new RuntimeException("Get should not be called from this context");
    }

    /** Timestamped stem for the archive name, e.g. "h2ologs_20140519_093000". */
    private String getOutputLogStem() {
      String pattern = "yyyyMMdd_hhmmss";
      SimpleDateFormat formatter = new SimpleDateFormat(pattern);
      String now = formatter.format(new Date());
      return "h2ologs_" + now;
    }

    /**
     * Packs each node's log payload into one archive under a common top-level
     * directory, one entry per cloud member.
     */
    private byte[] zipLogs(byte[][] results, String topDir) throws IOException {
      int l = 0;
      assert H2O.CLOUD._memary.length == results.length : "Unexpected change in the cloud!";
      // Sum all payload lengths into l so the output buffer starts big enough.
      for (int i = 0; i<results.length;l+=results[i++].length);
      ByteArrayOutputStream baos = new ByteArrayOutputStream(l);
      // Add top-level directory.
      ZipOutputStream zos = new ZipOutputStream(baos);
      {
        ZipEntry zde = new ZipEntry (topDir + File.separator);
        zos.putNextEntry(zde);
      }
      try {
        // Add zip directory from each cloud member; ':' and '/' in the node
        // address are not filename-safe, so replace them.
        for (int i =0; i<results.length; i++) {
          String filename = topDir + File.separator + "node" + i + H2O.CLOUD._memary[i].toString().replace(':', '_').replace('/', '_') + ".zip";
          ZipEntry ze = new ZipEntry(filename);
          zos.putNextEntry(ze);
          zos.write(results[i]);
          zos.closeEntry();
        }
        // Close the top-level directory.
        zos.closeEntry();
      } finally {
        // Close the full zip file.
        zos.close();
      }
      return baos.toByteArray();
    }
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/ModelMetrics.java
package water.api;

import java.util.*;

import dontweave.gson.*;

import water.DKV;
import water.Key;
import water.Request2;
import water.H2O;
import water.Value;
import water.Iced;
import water.fvec.Frame;
import water.util.Log;

/** Returns one ModelMetrics object, or all of them, as a JSON map. */
public class ModelMetrics extends Request2 {
  ///////////////////////
  // Request2 boilerplate
  ///////////////////////
  static final int API_WEAVER=1; // This file has auto-gen'd doc & json fields
  static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code.

  // This Request supports the HTML 'GET' command, and this is the help text
  // for GET.
  static final String DOC_GET = "Return the list of model metrics.";

  public static String link(Key k, String content){
    return "<a href='/2/ModelMetrics'>" + content + "</a>";
  }

  ////////////////
  // Query params:
  ////////////////
  @API(help="An existing H2O ModelMetrics key.", required=false, filter=Default.class)
  water.ModelMetrics key = null;

  @API(help="Expand the referenced Model and Frame objects.", required=false, filter=Default.class)
  boolean expand_references = false;

  /////////////////
  // The Code (tm):
  /////////////////
  public static final Gson gson = new GsonBuilder().serializeSpecialFloatingPointValues().setPrettyPrinting().create();

  /**
   * Fetch all ModelMetrics from the KV store.
   */
  protected static List<water.ModelMetrics> fetchAll() {
    return new ArrayList<water.ModelMetrics>(H2O.KeySnapshot.globalSnapshot().fetchAll(water.ModelMetrics.class).values());
  }

  /**
   * For one or more water.ModelMetrics from the KV store, return a Response
   * containing a "metrics" array with each one rendered as JSON.
   */
  private Response serveOneOrAll(List<water.ModelMetrics> list) {
    JsonArray metricsArray = new JsonArray();
    for (water.ModelMetrics metrics : list) {
      JsonObject metricsJson = metrics.toJSON();
      metricsArray.add(metricsJson);
    }
    JsonObject result = new JsonObject();
    result.add("metrics", metricsArray);
    return Response.done(result);
  }

  @Override protected Response serve() {
    if (null == this.key) {
      return serveOneOrAll(fetchAll());
    } else {
      // just serve the single requested ModelMetrics
      water.ModelMetrics mm = this.key;
      // (was a raw-typed 'new ArrayList()'; now properly parameterized)
      List<water.ModelMetrics> list = new ArrayList<water.ModelMetrics>();
      list.add(mm);
      return serveOneOrAll(list);
    }
  } // serve()
} // class ModelMetrics
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/Models.java
package water.api;

import dontweave.gson.Gson;
import dontweave.gson.GsonBuilder;
import dontweave.gson.JsonElement;
import dontweave.gson.JsonObject;
import hex.VarImp;
import hex.deeplearning.DeepLearning;
import hex.drf.DRF;
import hex.gbm.GBM;
import hex.glm.GLM2;
import hex.glm.GLMModel;
import hex.singlenoderf.SpeeDRF;
import hex.nb.NaiveBayes;
import hex.nb.NBModel;
import org.apache.commons.math3.util.Pair;
import water.*;
import water.api.Frames.FrameSummary;
import water.fvec.Frame;

import java.util.*;

import static water.util.ParamUtils.*;

/** Returns the list of models, optionally with the frames each model can score on. */
public class Models extends Request2 {
  ///////////////////////
  // Request2 boilerplate
  ///////////////////////
  static final int API_WEAVER=1; // This file has auto-gen'd doc & json fields
  static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code.

  // This Request supports the HTML 'GET' command, and this is the help text
  // for GET.
  static final String DOC_GET = "Return the list of models.";

  public static String link(Key k, String content){
    return "<a href='/2/Models'>" + content + "</a>";
  }

  ////////////////
  // Query params:
  ////////////////
  @API(help="An existing H2O Model key.", required=false, filter=Default.class)
  Model key = null;

  @API(help="Find Frames that are compatible with the Model.", required=false, filter=Default.class)
  boolean find_compatible_frames = false;

  @API(help="An existing H2O Frame key to score with the Model which is specified by the key parameter.", required=false, filter=Default.class)
  Frame score_frame = null;

  @API(help="Should we adapt() the Frame to the Model?", required=false, filter=Default.class)
  boolean adapt = true;

  /////////////////
  // The Code (tm):
  /////////////////
  public static final Gson gson = new GsonBuilder().serializeSpecialFloatingPointValues().setPrettyPrinting().create();

  /** Flat, JSON-friendly description of one model. */
  public static final class ModelSummary {
    public String[] warnings = new String[0];
    public String model_algorithm = "unknown";
    public Model.ModelCategory model_category = Model.ModelCategory.Unknown;
    public Job.JobState state = Job.JobState.CREATED;
    public String id = null;
    public String key = null;
    public long creation_epoch_time_millis = -1;
    public long training_duration_in_ms = -1;
    public List<String> input_column_names = new ArrayList<String>();
    public String response_column_name = "unknown";
    public Map critical_parameters = new HashMap<String, Object>();
    public Map secondary_parameters = new HashMap<String, Object>();
    public Map expert_parameters = new HashMap<String, Object>();
    public Map variable_importances = null;
    public Set<String> compatible_frames = new HashSet<String>();
  }

  /** Keeps only the whitelisted keys of a JsonObject, returned as a plain Map. */
  private static Map whitelistJsonObject(JsonObject unfiltered, Set<String> whitelist) {
    // If we create a new JsonObject here and serialize it the key/value pairs are inside
    // a superfluous "members" object, so create a Map instead.
    JsonObject filtered = new JsonObject();
    Set<Map.Entry<String,JsonElement>> entries = unfiltered.entrySet();
    for (Map.Entry<String,JsonElement> entry : entries) {
      String key = entry.getKey();
      if (whitelist.contains(key))
        filtered.add(key, entry.getValue());
    }
    return gson.fromJson(gson.toJson(filtered), Map.class);
  }

  /**
   * Fetch all the Frames so we can see if they are compatible with our Model(s).
   * Returns (key -> Frame, key -> column-name set); both null unless
   * find_compatible_frames is set.
   */
  private Pair<Map<String, Frame>, Map<String, Set<String>>> fetchFrames() {
    Map<String, Frame> all_frames = null;
    Map<String, Set<String>> all_frames_cols = null;

    if (this.find_compatible_frames) {
      // caches for this request
      all_frames = Frames.fetchAll();
      all_frames_cols = new TreeMap<String, Set<String>>();
      for (Map.Entry<String, Frame> entry : all_frames.entrySet()) {
        all_frames_cols.put(entry.getKey(), new TreeSet<String>(Arrays.asList(entry.getValue()._names)));
      }
    }
    return new Pair<Map<String, Frame>, Map<String, Set<String>>>(all_frames, all_frames_cols);
  }

  /** Returns the subset of all_frames whose columns cover the model's and that adapt() accepts. */
  private static Map<String, Frame> findCompatibleFrames(Model model, Map<String, Frame> all_frames, Map<String, Set<String>> all_frames_cols) {
    Map<String, Frame> compatible_frames = new TreeMap<String, Frame>();

    Set<String> model_column_names = new HashSet<String>(Arrays.asList(model._names));

    for (Map.Entry<String, Set<String>> entry : all_frames_cols.entrySet()) {
      Set<String> frame_cols = entry.getValue();

      if (frame_cols.containsAll(model_column_names)) {
        // See if adapt throws an exception or not.
        try {
          Frame frame = all_frames.get(entry.getKey());
          Frame[] outputs = model.adapt(frame, false); // TODO: this does too much work; write canAdapt()
          Frame trash = outputs[1];
          // TODO: outputs[0] (the adapted frame) may leave new vecs behind; we can't
          // delete() the frame as a whole here, so they are currently not cleaned up.
          trash.delete();

          // A-Ok
          compatible_frames.put(entry.getKey(), frame);
        } catch (Exception e) {
          // Deliberate best-effort: an adapt() failure just means "not compatible".
        }
      }
    }

    return compatible_frames;
  }

  /** Builds a ModelSummary for each requested key (all of models if keys is null). */
  public static Map<String, ModelSummary> generateModelSummaries(Set<String>keys, Map<String, Model> models, boolean find_compatible_frames, Map<String, Frame> all_frames, Map<String, Set<String>> all_frames_cols) {
    Map<String, ModelSummary> modelSummaries = new TreeMap<String, ModelSummary>();

    if (null == keys) {
      keys = models.keySet();
    }

    for (String key : keys) {
      ModelSummary summary = new ModelSummary();
      Models.summarizeAndEnhanceModel(summary, models.get(key), find_compatible_frames, all_frames, all_frames_cols);
      modelSummaries.put(key, summary);
    }

    return modelSummaries;
  }

  /**
   * Summarize subclasses of water.Model, dispatching to the algo-specific
   * summarizer, and optionally attach the set of compatible frame keys.
   */
  protected static void summarizeAndEnhanceModel(ModelSummary summary, Model model, boolean find_compatible_frames, Map<String, Frame> all_frames, Map<String, Set<String>> all_frames_cols) {
    if (model instanceof GLMModel) {
      summarizeGLMModel(summary, (GLMModel) model);
    } else if (model instanceof DRF.DRFModel) {
      summarizeDRFModel(summary, (DRF.DRFModel) model);
    } else if (model instanceof hex.deeplearning.DeepLearningModel) {
      summarizeDeepLearningModel(summary, (hex.deeplearning.DeepLearningModel) model);
    } else if (model instanceof hex.gbm.GBM.GBMModel) {
      summarizeGBMModel(summary, (hex.gbm.GBM.GBMModel) model);
    } else if (model instanceof hex.singlenoderf.SpeeDRFModel) {
      summarizeSpeeDRFModel(summary, (hex.singlenoderf.SpeeDRFModel) model);
    } else if (model instanceof NBModel) {
      summarizeNBModel(summary, (NBModel) model);
    } else {
      // catch-all
      summarizeModelCommonFields(summary, model);
    }

    if (find_compatible_frames) {
      Map<String, Frame> compatible_frames = findCompatibleFrames(model, all_frames, all_frames_cols);
      summary.compatible_frames = compatible_frames.keySet();
    }
  }

  /**
   * Summarize fields which are generic to water.Model.
   */
  private static void summarizeModelCommonFields(ModelSummary summary, Model model) {
    String[] names = model._names;

    summary.warnings = model.warnings;
    summary.model_algorithm = model.getClass().toString(); // fallback only; per-algo summarizers overwrite this

    // model.job() is a local copy; on multinode clusters we need to get from the DKV
    Key job_key = ((Job)model.job()).self();
    // NOTE: model is always non-null here (dereferenced above); the original also
    // guarded against a null model in this message, which was dead code.
    if (null == job_key)
      throw H2O.fail("Null job key for model: " + model._key); // later when we deserialize models from disk we'll relax this constraint
    Job job = DKV.get(job_key).get();
    summary.state = job.getState();
    summary.model_category = model.getModelCategory();

    UniqueId unique_id = model.getUniqueId();
    summary.id = unique_id.getId();
    summary.key = unique_id.getKey();
    summary.creation_epoch_time_millis = unique_id.getCreationEpochTimeMillis();
    summary.training_duration_in_ms = model.training_duration_in_ms;

    // By convention the response column is the last name; the rest are inputs.
    summary.response_column_name = names[names.length - 1];
    for (int i = 0; i < names.length - 1; i++)
      summary.input_column_names.add(names[i]);

    // Variable importances, if this model computed them.
    VarImp vi = model.varimp();
    if (null != vi) {
      summary.variable_importances = new LinkedHashMap<String, Object>();
      summary.variable_importances.put("varimp", vi.varimp);
      summary.variable_importances.put("variables", vi.getVariables());
      summary.variable_importances.put("method", vi.method);
      summary.variable_importances.put("max_var", vi.max_var);
      summary.variable_importances.put("scaled", vi.scaled());
    }
  }

  /******
   * GLM2
   ******/
  private static final Set<String> GLM_critical_params = getCriticalParamNames(GLM2.DOC_FIELDS);
  private static final Set<String> GLM_secondary_params = getSecondaryParamNames(GLM2.DOC_FIELDS);
  private static final Set<String> GLM_expert_params = getExpertParamNames(GLM2.DOC_FIELDS);

  /**
   * Summarize fields which are specific to hex.glm.GLMModel.
   */
  private static void summarizeGLMModel(ModelSummary summary, hex.glm.GLMModel model) {
    // add generic fields such as column names
    summarizeModelCommonFields(summary, model);

    summary.model_algorithm = "GLM";

    JsonObject all_params = (model.get_params()).toJSON();
    summary.critical_parameters = whitelistJsonObject(all_params, GLM_critical_params);
    summary.secondary_parameters = whitelistJsonObject(all_params, GLM_secondary_params);
    summary.expert_parameters = whitelistJsonObject(all_params, GLM_expert_params);
  }

  /******
   * DRF
   ******/
  private static final Set<String> DRF_critical_params = getCriticalParamNames(DRF.DOC_FIELDS);
  private static final Set<String> DRF_secondary_params = getSecondaryParamNames(DRF.DOC_FIELDS);
  private static final Set<String> DRF_expert_params = getExpertParamNames(DRF.DOC_FIELDS);

  /**
   * Summarize fields which are specific to hex.drf.DRF.DRFModel.
   */
  private static void summarizeDRFModel(ModelSummary summary, hex.drf.DRF.DRFModel model) {
    // add generic fields such as column names
    summarizeModelCommonFields(summary, model);

    summary.model_algorithm = "BigData RF";

    JsonObject all_params = (model.get_params()).toJSON();
    summary.critical_parameters = whitelistJsonObject(all_params, DRF_critical_params);
    summary.secondary_parameters = whitelistJsonObject(all_params, DRF_secondary_params);
    summary.expert_parameters = whitelistJsonObject(all_params, DRF_expert_params);
  }

  /******
   * SpeeDRF
   ******/
  private static final Set<String> SpeeDRF_critical_params = getCriticalParamNames(SpeeDRF.DOC_FIELDS);
  private static final Set<String> SpeeDRF_secondary_params = getSecondaryParamNames(SpeeDRF.DOC_FIELDS);
  private static final Set<String> SpeeDRF_expert_params = getExpertParamNames(SpeeDRF.DOC_FIELDS);

  /**
   * Summarize fields which are specific to hex.singlenoderf.SpeeDRFModel.
   */
  private static void summarizeSpeeDRFModel(ModelSummary summary, hex.singlenoderf.SpeeDRFModel model) {
    // add generic fields such as column names
    summarizeModelCommonFields(summary, model);

    summary.model_algorithm = "Random Forest";

    JsonObject all_params = (model.get_params()).toJSON();
    summary.critical_parameters = whitelistJsonObject(all_params, SpeeDRF_critical_params);
    summary.secondary_parameters = whitelistJsonObject(all_params, SpeeDRF_secondary_params);
    summary.expert_parameters = whitelistJsonObject(all_params, SpeeDRF_expert_params);
  }

  /***************
   * DeepLearning
   ***************/
  private static final Set<String> DL_critical_params = getCriticalParamNames(DeepLearning.DOC_FIELDS);
  private static final Set<String> DL_secondary_params = getSecondaryParamNames(DeepLearning.DOC_FIELDS);
  private static final Set<String> DL_expert_params = getExpertParamNames(DeepLearning.DOC_FIELDS);

  /**
   * Summarize fields which are specific to hex.deeplearning.DeepLearningModel.
   */
  private static void summarizeDeepLearningModel(ModelSummary summary, hex.deeplearning.DeepLearningModel model) {
    // add generic fields such as column names
    summarizeModelCommonFields(summary, model);

    summary.model_algorithm = "DeepLearning";

    JsonObject all_params = (model.get_params()).toJSON();
    summary.critical_parameters = whitelistJsonObject(all_params, DL_critical_params);
    summary.secondary_parameters = whitelistJsonObject(all_params, DL_secondary_params);
    summary.expert_parameters = whitelistJsonObject(all_params, DL_expert_params);
  }

  /******
   * GBM
   ******/
  private static final Set<String> GBM_critical_params = getCriticalParamNames(GBM.DOC_FIELDS);
  private static final Set<String> GBM_secondary_params = getSecondaryParamNames(GBM.DOC_FIELDS);
  private static final Set<String> GBM_expert_params = getExpertParamNames(GBM.DOC_FIELDS);

  /**
   * Summarize fields which are specific to hex.gbm.GBM.GBMModel.
   */
  private static void summarizeGBMModel(ModelSummary summary, hex.gbm.GBM.GBMModel model) {
    // add generic fields such as column names
    summarizeModelCommonFields(summary, model);

    summary.model_algorithm = "GBM";

    JsonObject all_params = (model.get_params()).toJSON();
    summary.critical_parameters = whitelistJsonObject(all_params, GBM_critical_params);
    summary.secondary_parameters = whitelistJsonObject(all_params, GBM_secondary_params);
    summary.expert_parameters = whitelistJsonObject(all_params, GBM_expert_params);
  }

  /******
   * NB
   ******/
  private static final Set<String> NB_critical_params = getCriticalParamNames(NaiveBayes.DOC_FIELDS);
  private static final Set<String> NB_secondary_params = getSecondaryParamNames(NaiveBayes.DOC_FIELDS);
  private static final Set<String> NB_expert_params = getExpertParamNames(NaiveBayes.DOC_FIELDS);

  /**
   * Summarize fields which are specific to hex.nb.NBModel.
   */
  private static void summarizeNBModel(ModelSummary summary, hex.nb.NBModel model) {
    // add generic fields such as column names
    summarizeModelCommonFields(summary, model);

    summary.model_algorithm = "Naive Bayes";

    JsonObject all_params = (model.get_params()).toJSON();
    summary.critical_parameters = whitelistJsonObject(all_params, NB_critical_params);
    summary.secondary_parameters = whitelistJsonObject(all_params, NB_secondary_params);
    summary.expert_parameters = whitelistJsonObject(all_params, NB_expert_params);
  }

  /**
   * Fetch all Models from the KV store.
   */
  protected Map<String, Model> fetchAll() {
    return H2O.KeySnapshot.globalSnapshot().fetchAll(water.Model.class);
  }

  /**
   * Score a frame with the given model.
   */
  protected static Response scoreOne(Frame frame, Model score_model, boolean adapt) {
    // TODO(review): the 'adapt' parameter is accepted but never forwarded to
    // Frames.scoreOne — confirm whether Frames.scoreOne should receive it.
    return Frames.scoreOne(frame, score_model);
  }

  /**
   * Fetch all the Models from the KV store, summarize and enhance them, and return a map of them.
   */
  private Response serveOneOrAll(Map<String, Model> modelsMap) {
    // returns empty sets if !this.find_compatible_frames
    Pair<Map<String, Frame>, Map<String, Set<String>>> frames_info = fetchFrames();
    Map<String, Frame> all_frames = frames_info.getFirst();
    Map<String, Set<String>> all_frames_cols = frames_info.getSecond();

    Map<String, ModelSummary> modelSummaries = Models.generateModelSummaries(null, modelsMap, find_compatible_frames, all_frames, all_frames_cols);

    Map<String, Object> resultsMap = new LinkedHashMap<String, Object>();
    resultsMap.put("models", modelSummaries);

    // If find_compatible_frames then include a map of the Frame summaries. Should we put this on a separate switch?
    if (this.find_compatible_frames) {
      Set<String> all_referenced_frames = new TreeSet<String>();

      for (Map.Entry<String, ModelSummary> entry: modelSummaries.entrySet()) {
        ModelSummary summary = entry.getValue();
        all_referenced_frames.addAll(summary.compatible_frames);
      }

      Map<String, FrameSummary> frameSummaries = Frames.generateFrameSummaries(all_referenced_frames, all_frames, false, null, null);
      resultsMap.put("frames", frameSummaries);
    }

    // TODO: temporary hack to get things going
    String json = gson.toJson(resultsMap);
    JsonObject result = gson.fromJson(json, JsonElement.class).getAsJsonObject();
    return Response.done(result);
  }

  @Override protected Response serve() {
    if (null == this.key) {
      return serveOneOrAll(fetchAll());
    } else {
      if (null == this.score_frame) {
        Model model = this.key;
        Map<String, Model> modelsMap = new TreeMap<String, Model>(); // Sort for pretty display and reliable ordering.
        modelsMap.put(model._key.toString(), model);
        return serveOneOrAll(modelsMap);
      } else {
        return scoreOne(this.score_frame, this.key, this.adapt);
      }
    }
  } // serve()
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/NFoldFrameExtractPage.java
package water.api; import hex.NFoldFrameExtractor; import water.*; import water.fvec.Frame; import water.util.Utils; public class NFoldFrameExtractPage extends Func { static final int API_WEAVER = 1; // This file has auto-gen'd doc & json fields static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code. @API(help = "Data frame", required = true, filter = Default.class) public Frame source; @API(help = "N-fold split", required = true, filter = Default.class, lmin=0) public int nfolds = 10; @API(help = "Split to extract", required = true, filter = Default.class, lmin=0) public int afold; @API(help = "Keys for each split partition.") public Key[] split_keys; @API(help = "Holds a number of rows per each output partition.") public long[] split_rows; @Override protected void init() throws IllegalArgumentException { super.init(); if (nfolds > source.numRows()) throw new IllegalArgumentException("Cannot provide more folds than number of rows in dataset!"); if (afold >= nfolds) throw new IllegalArgumentException("Request fold ("+afold+") is greater than number of folds ("+nfolds+")!"); } @Override protected void execImpl() { NFoldFrameExtractor extractor = new NFoldFrameExtractor(source, nfolds, afold, null, null); H2O.submitTask(extractor); Frame[] splits = extractor.getResult(); split_keys = new Key [splits.length]; split_rows = new long[splits.length]; long sum = 0; for(int i=0; i<splits.length; i++) { sum += splits[i].numRows(); split_keys[i] = splits[i]._key; split_rows[i] = splits[i].numRows(); } assert sum == source.numRows() : "Frame split produced wrong number of rows: nrows(source) != sum(nrows(splits))"; } @Override public boolean toHTML(StringBuilder sb) { int nsplits = split_keys.length; String [] headers = new String[nsplits+2]; headers[0] = ""; for(int i=0; i<nsplits; i++) headers[i+1] = "Split #"+i; headers[nsplits+1] = "Total"; DocGen.HTML.arrayHead(sb, headers); // Key table row 
sb.append("<tr><td>").append(DocGen.HTML.bold("Keys")).append("</td>"); for (int i=0; i<nsplits; i++) { Key k = split_keys[i]; sb.append("<td>").append(Inspect2.link(k)).append("</td>"); } sb.append("<td>").append(Inspect2.link(source._key)).append("</td>"); sb.append("</tr>"); // Number of rows row sb.append("<tr><td>").append(DocGen.HTML.bold("Rows")).append("</td>"); for (int i=0; i<nsplits; i++) { long r = split_rows[i]; sb.append("<td>").append(String.format("%,d", r)).append("</td>"); } sb.append("<td>").append(String.format("%,d", Utils.sum(split_rows))).append("</td>"); sb.append("</tr>"); DocGen.HTML.arrayTail(sb); return true; } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/NeuralNetModelView.java
package water.api; import hex.NeuralNet.NeuralNetModel; import water.Key; import water.Request2; import water.UKV; public class NeuralNetModelView extends Request2 { static final int API_WEAVER = 1; // This file has auto-gen'd doc & json fields static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code. @API(help="NeuralNet Model Key", required=true, filter=NeuralNetModelKeyFilter.class) Key _modelKey; class NeuralNetModelKeyFilter extends H2OKey { public NeuralNetModelKeyFilter() { super("model_key",true); } } @API(help="NeuralNet Model") NeuralNetModel neuralnet_model; public static String link(String txt, Key model) { return "<a href='NeuralNetModelView.html?_modelKey=" + model + "'>" + txt + "</a>"; } public static Response redirect(Request req, Key modelKey) { return Response.redirect(req, "/2/NeuralNetModelView", "_modelKey", modelKey); } @Override public boolean toHTML(StringBuilder sb){ neuralnet_model.generateHTML("NeuralNet Model", sb); return true; } @Override protected Response serve() { neuralnet_model = UKV.get(_modelKey); if (neuralnet_model == null) return Response.error("Model '" + _modelKey + "' not found!"); else return Response.done(this); } @Override public void toJava(StringBuilder sb) { neuralnet_model.toJavaHtml(sb); } @Override protected String serveJava() { NeuralNetModel m = UKV.get(_modelKey); if (m!=null) return m.toJava(); else return ""; } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/NeuralNetProgressPage.java
package water.api; import hex.NeuralNet; import water.Job; import water.Key; import water.UKV; public class NeuralNetProgressPage extends Progress2 { /** Return {@link Response} for finished job. */ @Override protected Response jobDone(final Key dst) { return NeuralNetModelView.redirect(this, dst); } public static Response redirect(Request req, Key jobkey, Key dest) { return Response.redirect(req, "/2/NeuralNetProgressPage", JOB_KEY, jobkey, DEST_KEY, dest); } @Override public boolean toHTML( StringBuilder sb ) { Job jjob = Job.findJob(job_key); if (jjob ==null) return true; NeuralNet.NeuralNetModel m = UKV.get(jjob.dest()); if (m!=null) m.generateHTML("NeuralNet Model", sb); else DocGen.HTML.paragraph(sb, "Pending..."); return true; } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/OneHot.java
package water.api; import water.*; import water.fvec.Frame; import water.util.Log; import water.util.RString; public class OneHot extends Request2 { static final int API_WEAVER = 1; // This file has auto-gen'd doc & json fields static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code. @API(help = "Data frame", required = true, filter = Default.class) public Frame source; @API(help = "Destination key", required = false, filter = Default.class) protected final Key destination_key = Key.make("__OneHot_" + Key.make()); @API(help = "Ignored columns by name and zero-based index", filter=colsNamesIdxFilter.class, displayName="Ignored columns") public int[] ignored_cols = new int[0]; class colsNamesIdxFilter extends MultiVecSelect { public colsNamesIdxFilter() {super("source", MultiVecSelectType.NAMES_THEN_INDEXES); } } // public static String link(Key k, String content) { // RString rs = new RString("<a href='OneHot.html?source=%$key'>%content</a>"); // rs.replace("key", k.toString()); // rs.replace("content", content); // return rs.toString(); // } @Override protected Response serve() { try { Frame fr = new Frame(destination_key,source._names.clone(),source.vecs().clone()).delete_and_lock(null); fr.remove(ignored_cols); Frame oneHotFrame = hex.OneHot.expandDataset(fr,destination_key); for (int i : ignored_cols) oneHotFrame.add(source._names[i], source.vecs()[i]); oneHotFrame.unlock(null); } catch( Throwable t ) { return Response.error(t); } return Inspect2.redirect(this, destination_key.toString()); } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/ParamImportance.java
package water.api; /** Parameter importance category */ public enum ParamImportance { UNIMPORTANT("unimportant", "The parameter does not affect model quality."), CRITICAL ("critical", "The parameter is critical for model quality."), SECONDARY ("secondary", "The parameter is important for model quality."), EXPERT ("expert", "Expert parameter."); /** Printable name */ public final String title; public final String help; private ParamImportance(String title, String help) { this.title = title; this.help = help; } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/Parse.java
package water.api; import dontweave.gson.JsonObject; import java.io.File; import java.util.ArrayList; import java.util.Collections; import java.util.Set; import java.util.regex.Pattern; import water.*; import water.parser.CsvParser; import water.parser.CustomParser; import water.parser.GuessSetup; import water.util.RString; abstract public class Parse extends Request { private final ParserType _parserType= new ParserType(PARSER_TYPE); private final Separator _separator = new Separator(SEPARATOR); private final Bool _header = new Bool(HEADER,false,"Use first line as a header"); protected final Bool _sQuotes = new Bool("single_quotes",false,"Enable single quotes as field quotation character"); protected final HeaderKey _hdrFrom = new HeaderKey("header_from_file",false); protected final Str _excludeExpression = new Str("exclude",""); protected final ExistingCSVKey _source = new ExistingCSVKey(SOURCE_KEY); protected final NewH2OHexKey _dest = new NewH2OHexKey(DEST_KEY); protected final Bool _blocking = new Bool("blocking",false,"Synchronously wait until parse completes"); @SuppressWarnings("unused") private final Preview _preview = new Preview(PREVIEW); public Parse() { _excludeExpression.setRefreshOnChange(); _header.setRefreshOnChange(); _blocking._hideInQuery = true; } // private static String toHTML(ParseSetupGuessException e){ // StringBuilder sb = new StringBuilder("<h3>Unable to Parse</h3>"); // if(!e.getMessage().isEmpty())sb.append("<div>" + e.getMessage() + "</div>"); // if(e._failed != null && e._failed.length > 0){ // sb.append("<div>\n<b>Found " + e._failed.length + " files which are not compatible with the given setup:</b></div>"); // int n = e._failed.length; // if(n > 5){ // sb.append("<div>" + e._failed[0] + "</div>"); // sb.append("<div>" + e._failed[1] + "</div>"); // sb.append("<div>...</div>"); // sb.append("<div>" + e._failed[n-2] + "</div>"); // sb.append("<div>" + e._failed[n-1] + "</div>"); // } else for(int i = 0; i < n;++i) // 
sb.append("<div>" + e._failed[n-1] + "</div>"); // } else if(e._gSetup == null || !e._gSetup.valid()) { // sb.append("Failed to find consistent parser setup for the given files!"); // } // return sb.toString(); // } protected static class PSetup { final transient ArrayList<Key> _keys; final transient Key [] _failedKeys; final CustomParser.PSetupGuess _setup; PSetup( ArrayList<Key> keys, Key [] fkeys, CustomParser.PSetupGuess pguess) { _keys=keys; _failedKeys = fkeys; _setup = pguess; } }; // An H2O Key Query, which runs the basic CSV parsing heuristics. Accepts // Key wildcards, and gathers all matching Keys for simultaneous parsing. // Multi-key parses are only allowed on compatible CSV files, and only 1 is // allowed to have headers. public class ExistingCSVKey extends TypeaheadInputText<PSetup> { public ExistingCSVKey(String name) { super(TypeaheadKeysRequest.class, name, true); // addPrerequisite(_parserType); // addPrerequisite(_separator); } @Override protected PSetup parse(String input) throws IllegalArgumentException { final Pattern p = makePattern(input); final Pattern exclude; if(_hdrFrom.specified()) _header.setValue(true); exclude = _excludeExpression.specified()?makePattern(_excludeExpression.value()):null; // boolean badkeys = false; final Key [] keyAry = H2O.KeySnapshot.globalSnapshot().filter(new H2O.KVFilter() { @Override public boolean filter(H2O.KeyInfo k) { if(k._rawData && k._nrows > 0) { String ks = k._key.toString(); return (p.matcher(ks).matches() && (exclude == null || !exclude.matcher(ks).matches())); } return false; } }).keys(); ArrayList<Key> keys = new ArrayList<Key>(keyAry.length); for(Key k:keyAry)keys.add(k); // now we assume the first key has the header Key hKey = null; if(_hdrFrom.specified()){ hKey = _hdrFrom.value()._key; _header.setValue(true); } boolean checkHeader = !_header.specified(); boolean hasHeader = _header.value(); CustomParser.ParserSetup userSetup = new 
CustomParser.ParserSetup(_parserType.value(),_separator.value(),hasHeader, _sQuotes.value()); CustomParser.PSetupGuess setup = null; try { setup = GuessSetup.guessSetup(keys, hKey, userSetup,checkHeader); }catch(GuessSetup.ParseSetupGuessException e){ throw new IllegalArgumentException(e.getMessage()); } if(setup._isValid){ if(setup._hdrFromFile != null) _hdrFrom.setValue(DKV.get(setup._hdrFromFile)); if(!_header.specified()) _header.setValue(setup._setup._header); else setup._setup._header = _header.value(); if(!_header.value()) _hdrFrom.disable("Header is disabled."); PSetup res = new PSetup(keys,null,setup); _parserType.setValue(setup._setup._pType); _separator.setValue(setup._setup._separator); _hdrFrom._hideInQuery = _header._hideInQuery = _separator._hideInQuery = setup._setup._pType != CustomParser.ParserType.CSV; Set<String> dups = setup.checkDupColumnNames(); if(!dups.isEmpty()) throw new IllegalArgumentException("Column labels must be unique but these labels are repeated: " + dups.toString()); return res; } else throw new IllegalArgumentException("Invalid parser setup. " + setup.toString()); } private final String keyRow(Key k){ return "<tr><td>" + k + "</td></tr>\n"; } @Override public String queryComment(){ if(!specified())return ""; PSetup p = value(); StringBuilder sb = new StringBuilder(); if(p._keys.size() <= 10){ for(Key k:p._keys) sb.append(keyRow(k)); } else { int n = p._keys.size(); for(int i = 0; i < 5; ++i) sb.append(keyRow(p._keys.get(i))); sb.append("<tr><td>...</td></tr>\n"); for(int i = 5; i > 0; --i) sb.append(keyRow(p._keys.get(n-i))); } return "<div class='alert'><b> Found " + p._keys.size() + " files matching the expression.</b><br/>\n" + "<table>\n" + sb.toString() + "</table></div>"; } private Pattern makePattern(String input) { // Reg-Ex pattern match all keys, like file-globbing. // File-globbing: '?' allows an optional single character, regex needs '.?' // File-globbing: '*' allows any characters, regex needs '*?' 
// File-globbing: '\' is normal character in windows, regex needs '\\' String patternStr = input.replace("?",".?").replace("*",".*?").replace("\\","\\\\").replace("(","\\(").replace(")","\\)"); Pattern p = Pattern.compile(patternStr); return p; } @Override protected PSetup defaultValue() { return null; } @Override protected String queryDescription() { return "An existing H2O key (or regex of keys) of CSV text"; } } // A Query String, which defaults to the source Key with a '.hex' suffix protected class NewH2OHexKey extends Str { NewH2OHexKey(String name) { super(name,null/*not required flag*/); addPrerequisite(_source); } @Override protected String defaultValue() { PSetup setup = _source.value(); if( setup == null ) return null; String n = setup._keys.get(0).toString(); // blahblahblah/myName.ext ==> myName int sep = n.lastIndexOf(File.separatorChar); if( sep > 0 ) n = n.substring(sep+1); int dot = n.lastIndexOf('.'); if( dot > 0 ) n = n.substring(0, dot); if( !Character.isJavaIdentifierStart(n.charAt(0)) ) n = "X"+n; char[] cs = n.toCharArray(); for( int i=1; i<cs.length; i++ ) if( !Character.isJavaIdentifierPart(cs[i]) ) cs[i] = '_'; n = new String(cs); int i = 0; String res = n + Extensions.HEX; Key k = Key.make(res); while(DKV.get(k) != null) k = Key.make(res = n + ++i + Extensions.HEX); return res; } @Override protected String queryDescription() { return "Destination hex key"; } } public class HeaderKey extends H2OExistingKey { public HeaderKey(String name, boolean required) { super(name, required); } @Override protected String queryElement() { StringBuilder sb = new StringBuilder(super.queryElement() + "\n"); try{ String [] colnames = _source.value() != null ? 
_source.value()._setup._setup._columnNames : null; if(colnames != null){ sb.append("<table class='table table-striped table-bordered'>").append("<tr><th>Header:</th>"); for( String s : colnames ) sb.append("<th>").append(s).append("</th>"); sb.append("</tr></table>"); } } catch( Exception e ) { } return sb.toString(); } } // A Query Bool, which includes a pretty HTML-ized version of the first few // parsed data rows. If the value() is TRUE, we display as-if the first row // is a label/header column, and if FALSE not. public class Preview extends Argument { Preview(String name) { super(name,false); // addPrerequisite(_source); // addPrerequisite(_separator); // addPrerequisite(_parserType); // addPrerequisite(_header); setRefreshOnChange(); } @Override protected String queryElement() { // first determine the value to put in the field // if no original value was supplied, use the provided one String[][] data = null; PSetup psetup = _source.value(); if(psetup == null) return _source.specified()?"<div class='alert alert-error'><b>Found no valid setup!</b></div>":""; StringBuilder sb = new StringBuilder(); if(psetup._failedKeys != null){ sb.append("<div class='alert alert-error'>"); sb.append("<div>\n<b>Found " + psetup._failedKeys.length + " files which are not compatible with the given setup:</b></div>"); int n = psetup._failedKeys.length; if(n > 5){ sb.append("<div>" + psetup._failedKeys[0] + "</div>\n"); sb.append("<div>" + psetup._failedKeys[1] + "</div>\n"); sb.append("<div>...</div>"); sb.append("<div>" + psetup._failedKeys[n-2] + "</div>\n"); sb.append("<div>" + psetup._failedKeys[n-1] + "</div>\n"); } else for(int i = 0; i < n;++i) sb.append("<div>" + psetup._failedKeys[n-1] + "</div>\n"); sb.append("</div>\n"); } String [] err = psetup._setup._errors; boolean hasErrors = err != null && err.length > 0; boolean parsedOk = psetup._setup._isValid; String parseMsgType = hasErrors?parsedOk?"warning":"error":"success"; sb.append("<div class='alert alert-" + 
parseMsgType + "'><b>" + psetup._setup.toString() + "</b>"); if(hasErrors) for(String s:err)sb.append("<div>" + s + "</div>"); sb.append("</div>"); if(psetup._setup != null) data = psetup._setup._data; String [] header = psetup._setup._setup._columnNames; if( data != null ) { sb.append("<table class='table table-striped table-bordered'>"); int j = 0; if( psetup._setup._setup._header && header != null) { // Obvious header display, if asked for sb.append("<tr><th>Row#</th>"); for( String s : header ) sb.append("<th>").append(s).append("</th>"); sb.append("</tr>"); if(header == data[0]) ++j; } for( int i=j; i<data.length; i++ ) { // The first few rows sb.append("<tr><td>Row ").append(i-j).append("</td>"); for( String s : data[i] ) sb.append("<td>").append(s).append("</td>"); sb.append("</tr>"); } sb.append("</table>"); } return sb.toString(); } @Override protected Object parse(String input) throws IllegalArgumentException {return null;} @Override protected Object defaultValue() {return null;} @Override protected String queryDescription() { return "Preview of the parsed data"; } @Override protected String jsRefresh(String callbackName) { return ""; } @Override protected String jsValue() { return ""; } } public static String link(Key k, String content) { return link(k.toString(),content); } public static String link(String k, String content) { RString rs = new RString("<a href='Parse.query?%key_param=%$key'>%content</a>"); rs.replace("key_param", SOURCE_KEY); rs.replace("key", k.toString()); rs.replace("content", content); return rs.toString(); } //@Override protected Response serve() { // PSetup p = _source.value(); // if(!p._setup._isValid) // return Response.error("Given parser setup is not valid, I can not parse this file."); // CustomParser.ParserSetup setup = p._setup._setup; // setup._singleQuotes = _sQuotes.value(); // Key dest = Key.make(_dest.value()); // try { // // Make a new Setup, with the 'header' flag set according to user wishes. 
// Key[] keys = p._keys.toArray(new Key[p._keys.size()]); // Job job = ParseDataset.forkParseDataset(dest, keys,setup); // if (_blocking.value()) { // Job.waitUntilJobEnded(job.self()); // } // JsonObject response = new JsonObject(); // response.addProperty(RequestStatics.JOB, job.self().toString()); // response.addProperty(RequestStatics.DEST_KEY,dest.toString()); // Response r = Progress.redirect(response, job.self(), dest); // r.setBuilder(RequestStatics.DEST_KEY, new KeyElementBuilder()); // return r; // } catch( Throwable e ) { // return Response.error(e); // } //} private class Separator extends InputSelect<Byte> { public Separator(String name) { super(name,false); setRefreshOnChange(); } @Override protected String queryDescription() { return "Utilized separator"; } @Override protected String[] selectValues() { return DEFAULT_IDX_DELIMS; } @Override protected String[] selectNames() { return DEFAULT_DELIMS; } @Override protected Byte defaultValue() {return CsvParser.AUTO_SEP;} public void setValue(Byte b){record()._value = b;} @Override protected String selectedItemValue(){ return value() != null ? 
value().toString() : defaultValue().toString(); } @Override protected Byte parse(String input) throws IllegalArgumentException { Byte result = Byte.valueOf(input); return result; } } private class ParserType extends InputSelect<CustomParser.ParserType> { public ParserType(String name) { super(name,false); setRefreshOnChange(); _values = new String [CustomParser.ParserType.values().length-1]; int i = 0; for(CustomParser.ParserType t:CustomParser.ParserType.values()) if(t != CustomParser.ParserType.XLSX) _values[i++] = t.name(); } private final String [] _values; @Override protected String queryDescription() { return "File type"; } @Override protected String[] selectValues() { return _values; } @Override protected String[] selectNames() { return _values; } @Override protected CustomParser.ParserType defaultValue() { return CustomParser.ParserType.AUTO; } public void setValue(CustomParser.ParserType pt){record()._value = pt;} @Override protected String selectedItemValue(){ return value() != null ? value().toString() : defaultValue().toString(); } @Override protected CustomParser.ParserType parse(String input) throws IllegalArgumentException { return CustomParser.ParserType.valueOf(input); } } /** List of white space delimiters */ static final String[] WHITE_DELIMS = { "NULL", "SOH (start of heading)", "STX (start of text)", "ETX (end of text)", "EOT (end of transmission)", "ENQ (enquiry)", "ACK (acknowledge)", "BEL '\\a' (bell)", "BS '\b' (backspace)", "HT '\\t' (horizontal tab)", "LF '\\n' (new line)", " VT '\\v' (vertical tab)", "FF '\\f' (form feed)", "CR '\\r' (carriage ret)", "SO (shift out)", "SI (shift in)", "DLE (data link escape)", "DC1 (device control 1) ", "DC2 (device control 2)", "DC3 (device control 3)", "DC4 (device control 4)", "NAK (negative ack.)", "SYN (synchronous idle)", "ETB (end of trans. 
blk)", "CAN (cancel)", "EM (end of medium)", "SUB (substitute)", "ESC (escape)", "FS (file separator)", "GS (group separator)", "RS (record separator)", "US (unit separator)", "' ' SPACE" }; /** List of all ASCII delimiters */ static final String[] DEFAULT_DELIMS = new String[127]; static final String[] DEFAULT_IDX_DELIMS = new String[127]; static { int i = 0; for (i = 0; i < WHITE_DELIMS.length; i++) DEFAULT_DELIMS[i] = String.format("%s: '%02d'", WHITE_DELIMS[i],i); for (;i < 126; i++) { String s = null; // Escape HTML entities manually or use StringEscapeUtils from Apache switch ((char)i) { case '&': s = "&amp;"; break; case '<': s = "&lt;"; break; case '>': s = "&gt;"; break; case '\"': s = "&quot;"; break; default : s = Character.toString((char)i); } DEFAULT_DELIMS[i] = String.format("%s: '%02d'", s, i); } for (i = 0; i < 126; i++) DEFAULT_IDX_DELIMS[i] = String.valueOf(i); DEFAULT_DELIMS[i] = "AUTO"; DEFAULT_IDX_DELIMS[i] = String.valueOf(CsvParser.AUTO_SEP); }; }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/Parse2.java
package water.api; import water.Job; import water.Key; import water.api.RequestServer.API_VERSION; import water.fvec.ParseDataset2; import water.parser.CustomParser; import water.util.RString; public class Parse2 extends Parse { static final int API_WEAVER=1; // This file has auto-gen'd doc & json fields static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code. @API(help="Response stats and info.") ResponseInfo response_info; // FIXME Parse2 should inherit from Request2 @API(help = "Job key") public Key job_key; // Boolean read-only value; exists==>running, not-exists==>canceled/removed @API(help = "Destination key") public Key destination_key; // Key holding final value after job is removed @API(help="Drop source text from H2O memory after parsing") public Bool delete_on_done = new Bool("delete_on_done",true, ""); @API(help="Should block and wait for result?") protected Bool _blocking = new Bool("blocking",false, ""); public static String link(String k, String content) { RString rs = new RString("<a href='Parse2.query?source_key=%key'>%content</a>"); rs.replace("key", k.toString()); rs.replace("content", content); return rs.toString(); } public Parse2(){_blocking._hideInQuery = true;} @Override protected Response serve() { PSetup p = _source.value(); CustomParser.ParserSetup setup = p != null?p._setup._setup:new CustomParser.ParserSetup(); setup._singleQuotes = _sQuotes.value(); destination_key = Key.make(_dest.value()); try { // Make a new Setup, with the 'header' flag set according to user wishes. Key[] keys = p._keys.toArray(new Key[p._keys.size()]); Job parseJob = ParseDataset2.forkParseDataset(destination_key, keys, setup, delete_on_done.value()); job_key = parseJob.self(); // Allow the user to specify whether to block synchronously for a response or not. if (_blocking.value()) { parseJob.get(); // block until the end of job assert Job.isEnded(job_key) : "Job is still running but we already passed over its end. 
Job = " + job_key; } return Progress2.redirect(this,job_key,destination_key); } catch( Throwable e) { return Response.error(e); } } @Override public API_VERSION[] supportedVersions() { return SUPPORTS_ONLY_V2; } public void fillResponseInfo(Response response) { this.response_info = response.extractInfo(); } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/Predict.java
package water.api; import hex.glm.GLMModel; import water.*; import water.fvec.Frame; import water.fvec.Vec; import water.util.RString; public class Predict extends Request2 { static final int API_WEAVER = 1; // This file has auto-gen'd doc & json fields static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code. @API(help = "Model", required = true, filter = Default.class) public Model model; // Type to Model when retired OldModel @API(help = "Data frame", required = true, filter = Default.class) public Frame data; @API(help = "Prediction", filter = Default.class) public Key prediction; public static String link(Key k, String content) { RString rs = new RString("<a href='Predict.query?model=%$key'>%content</a>"); rs.replace("key", k.toString()); rs.replace("content", content); return rs.toString(); } @Override protected Response serve() { Frame fr = null; try { if( model == null ) throw new IllegalArgumentException("Model is required to perform validation!"); // Create a new random key if ( prediction == null ) prediction = Key.make("__Prediction_" + Key.make()); fr = new Frame(prediction,new String[0],new Vec[0]).delete_and_lock(null); fr = model.score(data); fr = new Frame(prediction,fr._names,fr.vecs()); // Jam in the frame key return Inspect2.redirect(this, prediction.toString()); } catch( Throwable t ) { return Response.error(t); } finally { if( fr != null ) fr.unlock(null); } } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/Progress2.java
package water.api; import water.*; import water.api.RequestServer.API_VERSION; public class Progress2 extends Request2 { static final int API_WEAVER = 1; // This file has auto-gen'd doc & json fields static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code. // This Request supports the HTML 'GET' command, and this is the help text // for GET. static final String DOC_GET = "Track progress of an ongoing Job"; @API(help = "The Job id being tracked.", json = true, filter = Default.class) public Key job_key; @API(help = "The destination key being produced.", json = true, required = true, filter = Default.class) public Key destination_key; @API(help = "") public float progress = 0.0f; public static String jsonUrl(Key jobKey, Key destKey) { return "2/Progress2.json?job_key=" + jobKey + "&destination_key=" + destKey; } public static Response redirect(Request req, Key jobkey, Key dest) { return Response.redirect(req, "/2/Progress2", "job_key", jobkey, "destination_key", dest); } @Override protected Response serve() { Job jjob = null; if( job_key != null ) jjob = Job.findJob(job_key); if( jjob != null && jjob.isCancelledOrCrashed()) // Handle cancelled job return Response.error(jjob.isCrashed() ? jjob.exception : "Job was cancelled by user!" ); if( jjob == null || jjob.isDone() ) // Handle done job return jobDone(destination_key); return jobInProgress(jjob, destination_key); } /** Return {@link Response} for finished job. */ protected Response jobDone(final Key dst) { return Inspect2.redirect(this, dst.toString()); } /** Return default progress {@link Response}. */ protected Response jobInProgress(final Job job, final Key dst) { progress = job.progress(); return Response.poll(this, (int) (100 * job.progress()), 100, "job_key", job_key.toString(), "destination_key", dst.toString()); } @Override public boolean toHTML(StringBuilder sb) { Job jjob = null; if( job_key != null ) jjob = Job.findJob(job_key); DocGen.HTML.title(sb, jjob != null ? 
jjob.description : null); DocGen.HTML.section(sb, destination_key.toString()); return true; } @Override protected boolean log() { return false; } @Override public API_VERSION[] supportedVersions() { return SUPPORTS_ONLY_V2; } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/PutValue.java
package water.api; import dontweave.gson.JsonObject; import water.Key; import water.UKV; import water.Value; public class PutValue extends Request { protected final H2OKey _key = new H2OKey(KEY,true); protected final Str _value = new Str(VALUE); protected final Int _rf = new Int(REPLICATION_FACTOR,2,0,255); public PutValue() { _requestHelp = "Stores the given value to the cloud under the specified key." + " The replication factor may also be specified."; _key._requestHelp = "Key under which the value should be stored."; _value._requestHelp = "Value that will be stored under the given key."; _rf._requestHelp = "Desired replication factor of the key. That is on how" + " many nodes should the value be replicated at least"; } @Override public Response serve() { JsonObject response = new JsonObject(); Key k = Key.make(_key.value()._kb, (byte) (int)_rf.value()); Value v = new Value(k,_value.value().getBytes()); UKV.put(k,v); response.addProperty(KEY,k.toString()); response.addProperty(REPLICATION_FACTOR,k.desired()); response.addProperty(VALUE_SIZE,v._max); return Response.done(response); } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/QuantilesPage.java
package water.api; import hex.Quantiles; import water.*; import water.util.RString; import water.util.Log; import water.fvec.*; public class QuantilesPage extends Func { static final int API_WEAVER=1; // This file has auto-gen'd doc & json fields static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code. // This Request supports the HTML 'GET' command, and this is the help text // for GET. static final String DOC_GET = "Returns a summary of a fluid-vec frame"; @API(help="An existing H2O Frame key.", required=true, filter=Default.class) public Frame source_key; @API(help="Column to calculate quantile for", required=true, filter=responseFilter.class) public Vec column; class responseFilter extends VecClassSelect { responseFilter() { super("source_key"); } } @API(help = "Quantile desired (0.0-1.0). Median is 0.5. 0 and 1 are min/max", filter = Default.class, dmin = 0, dmax = 1) public double quantile = 0.5; @API(help = "Number of bins used (1-1000000). 1000 recommended", filter = Default.class, lmin = 1, lmax = 1000000) public int max_qbins = 1000; @API(help = "1: Exact result (iterate max 16). 0: One pass approx. 2: Provide both results", filter = Default.class, lmin = 0, lmax = 2) public int multiple_pass = 1; @API(help = "Interpolation between rows. Type 2 (mean) or 7 (linear).", filter = Default.class) public int interpolation_type = 7; // this isn't used yet. 
column_name is // class colsFilter1 extends MultiVecSelect { public colsFilter1() { super("source_key");} } // @API(help = "Not supported yet (Select columns)", filter=colsFilter1.class) // int[] cols; @API(help = "Maximum number of columns to show quantile", filter = Default.class, lmin = 1) int max_ncols = 1000; @API(help = "Column name.") String column_name; @API(help = "Quantile requested.") double quantile_requested; @API(help = "Interpolation type used.") int interpolation_type_used; @API(help = "False if an exact result is provided, True if the answer is interpolated.") boolean interpolated; @API(help = "Number of iterations actually performed.") int iterations; @API(help = "Result.") public double result; @API(help = "Single pass Result.") double result_single; public static String link(Key k, String content) { RString rs = new RString("<a href='QuantilesPage.query?source=%$key'>"+content+"</a>"); rs.replace("key", k.toString()); return rs.toString(); } @Override protected void init() throws IllegalArgumentException { super.init(); if( source_key == null ) throw new IllegalArgumentException("Source key is missing"); if( column == null ) throw new IllegalArgumentException("Column is missing"); if( column.isEnum() ) throw new IllegalArgumentException("Column is an enum"); if(! ((interpolation_type == 2) || (interpolation_type == 7)) ) { throw new IllegalArgumentException("Unsupported interpolation type. Currently only allow 2 or 7"); } } @Override protected void execImpl() { String[] names = new String[1]; Futures fs = new Futures(); column.rollupStats(fs); fs.blockForPending(); boolean multiPass; Quantiles[] qbins; // just take one here. 
// it's array because summary2 might use with a single pass list // and an exec single pass approx could pass a threshold list double [] quantiles_to_do = new double[1]; quantiles_to_do[0] = quantile; double approxResult; double exactResult; result_single = Double.NaN; result = Double.NaN; boolean done = false; // approx (fully independent from the multipass) qbins = null; if ( multiple_pass == 0 || multiple_pass == 2 ) { multiPass = false; result_single = Double.NaN; if ( multiple_pass == 0) result = Double.NaN; // These are used as initial params, and setup for the next iteration // be sure to set again if multiple qbins are created double valStart = column.min(); double valEnd = column.max(); // quantile doesn't matter for the map/reduce binning qbins = new Quantiles.BinTask2(max_qbins, valStart, valEnd).doAll(column)._qbins; Log.debug("Q_ for approx. valStart: "+valStart+" valEnd: "+valEnd); // Have to get this internal state, and copy this state for the next iteration // in order to multipass // I guess forward as params to next iteration // while ( (iteration <= maxIterations) && !done ) { // valStart = newValStart; // valEnd = newValEnd; // These 3 are available for viewing, but not necessary to iterate // valRange = newValRange; // valBinSize = newValBinSize; // valLowCnt = newValLowCnt; interpolation_type_used = interpolation_type; quantile_requested = quantiles_to_do[0]; if ( qbins != null ) { // if it's enum it will be null? qbins[0].finishUp(column, quantiles_to_do, interpolation_type, multiPass); column_name = names[0]; // the string name, not the param iterations = 1; done = qbins[0]._done; approxResult = qbins[0]._pctile[0]; interpolated = qbins[0]._interpolated; } else { column_name = ""; iterations = 0; done = false; approxResult = Double.NaN; interpolated = false; } result_single = approxResult; // only the best result if we only ran the approx if ( multiple_pass == 0 ) result = approxResult; // if max_qbins is set to 2? hmm. 
we won't resolve if max_qbins = 1 // interesting to see how we resolve (should we disallow < 1000? (accuracy issues) but good for test) } if ( multiple_pass == 1 || multiple_pass == 2 ) { final int MAX_ITERATIONS = 16; multiPass = true; exactResult = Double.NaN; double valStart = column.min(); double valEnd = column.max(); for (int b = 0; b < MAX_ITERATIONS; b++) { // we did an approximation pass above we could reuse it for the first pass here? // quantile doesn't matter for the map/reduce binning // cleaned up things so no multipass behavior in qbins..all in finishUp:w // so can reuse the qbins from the approx pass above (if done) if ( !(multiple_pass==2 && b==0) ) { qbins = new Quantiles.BinTask2(max_qbins, valStart, valEnd).doAll(column)._qbins; } iterations = b + 1; if ( qbins == null ) break; else { qbins[0].finishUp(column, quantiles_to_do, interpolation_type, multiPass); Log.debug("\nQ_ multipass iteration: "+iterations+" valStart: "+valStart+" valEnd: "+valEnd); double valBinSize = qbins[0]._valBinSize; Log.debug("Q_ valBinSize: "+valBinSize); valStart = qbins[0]._newValStart; valEnd = qbins[0]._newValEnd; done = qbins[0]._done; if ( done ) break; } } interpolation_type_used = interpolation_type; quantile_requested = quantiles_to_do[0]; if ( qbins != null ) { // if it's enum it will be null? column_name = names[0]; // string name, not the param done = qbins[0]._done; exactResult = qbins[0]._pctile[0]; interpolated = qbins[0]._interpolated; } else { // enums must come this way. Right now we don't seem // to create everything for the normal response, if we reject an enum col. // should fix that. For now, just hack it to not look for stuff column_name = ""; iterations = 0; done = false; exactResult = Double.NaN; interpolated = false; } // all done with it qbins = null; // always the best result if we ran here result = exactResult; } } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/Remove.java
package water.api; import water.Futures; import water.Lockable; import water.UKV; import water.Value; import dontweave.gson.JsonObject; public class Remove extends Request { @Override public RequestServer.API_VERSION[] supportedVersions() { return SUPPORTS_V1_V2; } protected final H2OExistingKey _key = new H2OExistingKey(KEY); @Override protected Response serve() { try { Lockable.delete(_key.value()._key); } catch( Throwable e ) { return Response.error(e); } JsonObject response = new JsonObject(); response.addProperty(KEY, _key.toString()); return Response.redirect(response, StoreView.class, null); } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/RemoveAck.java
package water.api;

import water.Value;
import dontweave.gson.JsonObject;

/**
 * REST endpoint that renders an "are you sure?" confirmation page before a
 * key is actually deleted. The "Yes!" button links to {@link Remove}, which
 * performs the deletion; "No" simply navigates back in browser history.
 */
public class RemoveAck extends Request {

  // The key the user asked to delete; must already exist.
  protected final H2OExistingKey _key = new H2OExistingKey(KEY);

  @Override protected Response serve() {
    Value v = _key.value();
    String key = v._key.toString();
    // NOTE(review): this JsonObject is populated but never passed to the
    // Response below (Response.done gets a fresh empty object) — confirm
    // whether the key was meant to appear in the JSON payload.
    JsonObject response = new JsonObject();
    response.addProperty(RequestStatics.KEY, key);
    Response r = Response.done(new JsonObject());
    // Confirmation dialog is injected as raw HTML into the page header.
    // (Trailing '//' markers keep the concatenation one-per-line under
    // auto-formatters.)
    r.addHeader("" //
        + "<div class='alert alert-error'>Are you sure you want to delete key <strong>" + key + "</strong>?<br/>" //
        + "There is no way back!" //
        + "</div>" //
        + "<div style='text-align:center'>" //
        + "<a href='javascript:history.back()'><button class='btn btn-primary'>No, go back</button></a>" //
        + "&nbsp;&nbsp;&nbsp;" //
        + "<a href='Remove.html?" + KEY + "=" + key + "'><button class='btn btn-danger'>Yes!</button></a>" //
        + "</div>");
    return r;
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/RemoveAll.java
package water.api; import dontweave.gson.JsonObject; import water.Job; import water.util.Log; import water.util.RemoveAllKeysTask; public class RemoveAll extends JSONOnlyRequest { @Override public RequestServer.API_VERSION[] supportedVersions() { return SUPPORTS_ONLY_V2; } @Override protected void registered(RequestServer.API_VERSION version) { super.registered(version); } @Override protected Response serve() { try { Log.info("Removing all keys for the cluster"); // First cancel all jobs and wait for them to be done. Log.info("Cancelling all jobs..."); for (Job job : Job.all()) { job.cancel(); Job.waitUntilJobEnded(job.self()); } Log.info("Finished cancelling all jobs"); RemoveAllKeysTask collector = new RemoveAllKeysTask(); collector.invokeOnAllNodes(); Log.info("Finished removing keys"); } catch( Throwable e ) { return Response.error(e); } JsonObject response = new JsonObject(); return Response.done(response); } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/Request.java
package water.api;

import hex.nb.NBModel;
import hex.pca.PCAModel;
import java.io.InputStream;
import java.lang.annotation.*;
import java.util.*;
import water.*;
import water.api.Request.Validator.NOPValidator;
import water.api.RequestServer.API_VERSION;
import water.fvec.Frame;
import water.util.*;
import com.google.common.io.ByteStreams;
import dontweave.gson.JsonObject;

/**
 * Base class for all v1 REST API request handlers.
 *
 * Subclasses declare their inputs/outputs via the {@link API} annotation and
 * implement {@link #serve()}; this class handles argument checking, response
 * wrapping (HTML/JSON/XML/Java), the HTML page template, and the navbar
 * registry shared by all pages.
 */
public abstract class Request extends RequestBuilders {

  /**
   * Field/method annotation describing one request parameter or output.
   * Processed reflectively by the request framework (and the API weaver)
   * to build argument lists, validation, and auto-generated docs.
   */
  @Retention(RetentionPolicy.RUNTIME)
  @Target({ElementType.FIELD, ElementType.METHOD})
  @Documented
  public @interface API {
    String help();
    /** Must be specified. */
    boolean required() default false;
    /** For keys. If specified, the key must exist. */
    boolean mustExist() default false;
    int since() default 1;
    int until() default Integer.MAX_VALUE;
    Class<? extends Filter> filter() default Filter.class;
    Class<? extends Filter>[] filters() default {};
    /** Forces an input field to also appear in JSON. */
    boolean json() default false;
    long lmin() default Long.MIN_VALUE;
    long lmax() default Long.MAX_VALUE;
    double dmin() default Double.NEGATIVE_INFINITY;
    double dmax() default Double.POSITIVE_INFINITY;
    boolean hide() default false;
    String displayName() default "";
    boolean gridable() default true;
    Class<? extends Validator> validator() default NOPValidator.class;
    ParamImportance importance() default ParamImportance.EXPERT; // Shown in UI by default.

    // ============
    // NEW API
    String[] dependsOn() default {}; // Should be field automatically depending on values, valid fields
    String[] helpFiles() default {};
    Direction direction() default Direction.OUT;
    /** REST path to reference of this field */
    String path() default "";
    /** Validation String for annotated field - make sense only for input annotation!
     * It should express a predicate, e.g.:
     * <code>"/frames/${/parameters/source}/cols/${/parameters/response}/type != 'Float' && ${/parameters/learn_rate} > 1000</code>
     */
    String valid() default "";
    /** Is the field enabled. */
    String enabled() default "";
    /** Is the field visible. */
    String visible() default "";
    /** Predefined values for the field - can be a list of values, or query to
     * obtain values.
     *
     * <code>/frames/${source}/cols?names</code>, <code>1,2,10,15</code>
     */
    String values() default "";
    /**
     * Type of parameter
     */
    Class type() default Void.class;
    // =========
  }

  /** Two-phase validator for a parameter: raw string form, then parsed value. */
  public interface Validator<V> extends Freezable {
    void validateRaw(String value) throws IllegalArgumentException;
    void validateValue(V value) throws IllegalArgumentException;

    /** Dummy helper class for NOP validator. */
    public static class NOPValidator<V> extends Iced implements Validator<V> {
      @Override public void validateRaw(String value) { }
      @Override public void validateValue(V value) { }
    }
  }

  public static interface Filter {
    boolean run(Object value);
  }

  /** NOP filter, use to define a field as input. */
  public class Default implements Filter {
    @Override public boolean run(Object value) { return true; }
  }

  // Declared in a superclass; kept here commented-out for reference.
  // public String _requestHelp;

  protected Request(String help) { _requestHelp = help; }
  protected Request() { }

  /** href for the default (first supported) API version. */
  public String href() { return href(supportedVersions()[0]); }
  protected String href(API_VERSION v) { return v.prefix() + getClass().getSimpleName(); }
  protected RequestType hrefType() { return RequestType.www; }
  protected boolean log() { return true; }
  protected void registered(API_VERSION version) { }
  protected Request create(Properties parms) { return this; }

  /** Implements UI call.
   *
   * <p>This should be call only from
   * UI layer - i.e., RequestServer.</p>
   *
   * @see RequestServer
   */
  protected abstract Response serve();

  protected String serveJava() { throw new UnsupportedOperationException("This request does not provide Java code!"); }

  /** Top-level dispatch: routes by request type (help/xml/json/www/query/java). */
  public NanoHTTPD.Response serve(NanoHTTPD server, Properties parms, RequestType type) {
    switch( type ) {
      case help:
        return wrap(server, HTMLHelp());
      case xml:
      case json:
      case www:
        return serveGrid(server, parms, type);
      case query: {
        // Reset argument state before (re)building the query form.
        for (Argument arg: _arguments) arg.reset();
        String query = buildQuery(parms,type);
        return wrap(server, query);
      }
      case java:
        checkArguments(parms, type); // Do not check returned query but let it fail in serveJava
        String javacode = serveJava();
        return wrap(server, javacode, RequestType.java);
      default:
        throw new RuntimeException("Invalid request type " + type.toString());
    }
  }

  /** Validates arguments, invokes serve(), stamps the start time, then formats the response. */
  protected NanoHTTPD.Response serveGrid(NanoHTTPD server, Properties parms, RequestType type) {
    String query = checkArguments(parms, type);
    if( query != null ) return wrap(server, query, type);
    long time = System.currentTimeMillis();
    Response response = null;
    try {
      response = serve();
    } catch (IllegalArgumentException iae) {
      // handle illegal arguments
      response = Response.error(iae);
    }
    response.setTimeStart(time);
    return serveResponse(server, parms, type, response);
  }

  /** Serializes an already-built Response as JSON, XML, or templated HTML. */
  public NanoHTTPD.Response serveResponse(NanoHTTPD server, Properties parms, RequestType type, Response response) {
    // Argh - referencing subclass, sorry for that, but it is temporary hack
    // for transition between v1 and v2 API
    if (this instanceof Request2) ((Request2) this).fillResponseInfo(response);
    if (this instanceof Parse2) ((Parse2) this).fillResponseInfo(response); // FIXME: Parser2 should inherit from Request2
    if( type == RequestType.json ) {
      // v1 requests serialize via gson; v2 (_req set) via the AutoBuffer writer.
      return response._req == null ?
          wrap(server, response.toJson()) :
          wrap(server, new String(response._req.writeJSON(new AutoBuffer()).buf()), RequestType.json);
    } else if (type == RequestType.xml) {
      if (response._req == null) {
        String xmlString = response.toXml();
        NanoHTTPD.Response r = wrap(server, xmlString, RequestType.xml);
        return r;
      } else {
        // v2: round-trip JSON -> org.json -> XML.
        String jsonString = new String(response._req.writeJSON(new AutoBuffer()).buf());
        org.json.JSONObject jo2 = new org.json.JSONObject(jsonString);
        String xmlString = org.json.XML.toString(jo2);
        NanoHTTPD.Response r = wrap(server, xmlString, RequestType.xml);
        return r;
      }
    }
    return wrap(server, build(response));
  }

  /** Wraps a content fragment into the full HTML page template. */
  protected NanoHTTPD.Response wrap(NanoHTTPD server, String response) {
    RString html = new RString(htmlTemplate());
    html.replace("CONTENTS", response);
    return server.new Response(NanoHTTPD.HTTP_OK, NanoHTTPD.MIME_HTML, html.toString());
  }

  protected NanoHTTPD.Response wrap(NanoHTTPD server, JsonObject response) {
    return server.new Response(NanoHTTPD.HTTP_OK, NanoHTTPD.MIME_JSON, response.toString());
  }

  /** Wraps raw content with the MIME type matching the request type; falls back to HTML. */
  public NanoHTTPD.Response wrap(NanoHTTPD server, String value, RequestType type) {
    if( type == RequestType.xml )
      return server.new Response(NanoHTTPD.HTTP_OK, NanoHTTPD.MIME_XML, value);
    if( type == RequestType.json )
      return server.new Response(NanoHTTPD.HTTP_OK, NanoHTTPD.MIME_JSON, value);
    if (type == RequestType.java)
      return server.new Response(NanoHTTPD.HTTP_OK, NanoHTTPD.MIME_PLAINTEXT, value);
    return wrap(server, value);
  }

  // html template and navbar handling -----------------------------------------

  /**
   * Read from file once.
   */
  private static final String _htmlTemplateFromFile;

  /**
   * Written by initializeNavBar().
   */
  private static volatile String _htmlTemplate;

  protected String htmlTemplate() { return _htmlTemplate; }

  static {
    _htmlTemplateFromFile = loadTemplate("/page.html");
    _htmlTemplate = "";
  }

  /** Loads an HTML template from resources, substituting the cloud name; dies on failure. */
  static final String loadTemplate(String name) {
    InputStream resource = Boot._init.getResource2(name);
    try {
      if( H2O.NAME != null )
        return new String(ByteStreams.toByteArray(resource)).replace("%cloud_name", H2O.NAME);
    } catch( NullPointerException e ) {
      if( !Log._dontDie ) {
        Log.err(e);
        Log.die(name+" not found in resources.");
      }
    } catch( Exception e ) {
      Log.err(e);
      Log.die(e.getMessage());
    } finally {
      Utils.close(resource);
    }
    return null;
  }

  /** One navbar entry: a request, its display name, and whether it opens in a new tab. */
  private static class MenuItem {
    public final Request _request;
    public final String _name;
    public final boolean _useNewTab;

    public MenuItem(Request request, String name, boolean useNewTab) {
      _request = request;
      _name = name;
      _useNewTab = useNewTab;
    }

    /** Renders this entry as a bootstrap list item. */
    public void toHTML(StringBuilder sb) {
      sb.append("<li><a href='");
      sb.append(_request.href() + _request.hrefType()._suffix);
      sb.append("'");
      if (_useNewTab) {
        sb.append(" target='_blank'");
      }
      sb.append(">");
      sb.append(_name);
      sb.append("</a></li>");
    }
  }

  // Navbar registry: category -> entries, plus insertion order of categories.
  private static HashMap<String, ArrayList<MenuItem>> _navbar = new HashMap();
  private static ArrayList<String> _navbarOrdering = new ArrayList();

  /**
   * Call this after the last call addToNavbar().
   * This is called automatically for navbar entries from inside H2O.
   * If user app level code calls addToNavbar, then call this again to make those changes visible.
   */
  public static void initializeNavBar() { _htmlTemplate = initializeNavBar(_htmlTemplateFromFile); }

  /** Renders the navbar into the template; single-entry categories become plain links, others dropdowns. */
  private static String initializeNavBar(String template) {
    StringBuilder sb = new StringBuilder();
    for( String s : _navbarOrdering ) {
      ArrayList<MenuItem> arl = _navbar.get(s);
      if( (arl.size() == 1) && arl.get(0)._name.equals(s) ) {
        arl.get(0).toHTML(sb);
      } else {
        sb.append("<li class='dropdown'>");
        sb.append("<a href='#' class='dropdown-toggle' data-toggle='dropdown'>");
        sb.append(s);
        sb.append("<b class='caret'></b>");
        sb.append("</a>");
        sb.append("<ul class='dropdown-menu'>");
        for( MenuItem i : arl ) i.toHTML(sb);
        sb.append("</ul></li>");
      }
    }
    RString str = new RString(template);
    str.replace("NAVBAR", sb.toString());
    str.replace("CONTENTS", "%CONTENTS");
    return str.toString();
  }

  /** Registers a top-level (uncategorized) navbar entry; the name must be unique. */
  public static Request addToNavbar(Request r, String name) {
    assert (!_navbar.containsKey(name));
    ArrayList<MenuItem> arl = new ArrayList();
    boolean useNewTab = false;
    arl.add(new MenuItem(r, name, useNewTab));
    _navbar.put(name, arl);
    _navbarOrdering.add(name);
    return r;
  }

  public static Request addToNavbar(Request r, String name, String category) {
    boolean useNewTab = false;
    return addToNavbar(r, name, category, useNewTab);
  }

  /** Registers a navbar entry under a category, creating the category on first use. */
  public static Request addToNavbar(Request r, String name, String category, boolean useNewTab) {
    ArrayList<MenuItem> arl = _navbar.get(category);
    if( arl == null ) {
      arl = new ArrayList();
      _navbar.put(category, arl);
      _navbarOrdering.add(category);
    }
    arl.add(new MenuItem(r, name, useNewTab));
    return r;
  }

  // TODO clean this stuff, typeahead should take type name
  /** Maps a value class to the typeahead request used to autocomplete keys of that type. */
  protected static Class mapTypeahead(Class c) {
    if(c != null) {
      if( PCAModel.class.isAssignableFrom(c) ) return TypeaheadPCAModelKeyRequest.class;
      if( NBModel.class.isAssignableFrom(c) ) return TypeaheadNBModelKeyRequest.class;
      if( Model.class.isAssignableFrom(c)) return TypeaheadModelKeyRequest.class;
      if( Frame.class.isAssignableFrom(c) ) return TypeaheadHexKeyRequest.class;
    }
    return TypeaheadKeysRequest.class;
  }

  // ==========================================================================

  public boolean toHTML(StringBuilder sb) { return false; }

  public void toJava(StringBuilder sb) {}

  public String toDocGET() { return null; }

  /**
   * Example of passing and failing request. Will be prepended with
   * "curl -s localhost:54321/Request.json". Return param/value pairs that will be used to build up
   * a URL, and the result from serving the URL will show up as an example.
   */
  public String[] DocExampleSucc() { return null; }

  public String[] DocExampleFail() { return null; }

  public String HTMLHelp() { return DocGen.HTML.genHelp(this); }

  public String ReSTHelp() { return DocGen.ReST.genHelp(this); }

  // Dummy write of a leading field, so the auto-gen JSON can just add commas
  // before each succeeding field.
  @Override public AutoBuffer writeJSONFields(AutoBuffer bb) { return bb.putJSON4("Request2",0); }

  /**
   * Request API versioning.
   * TODO: better solution would be to have an explicit annotation for each request
   * - something like <code>@API-VERSION(2) @API-VERSION(1)</code>
   * Annotation will be processed during start of RequestServer and default version will be registered
   * under /, else /version/name_of_request.
   */
  protected static final API_VERSION[] SUPPORTS_ONLY_V1 = new API_VERSION[] { API_VERSION.V_1 };
  protected static final API_VERSION[] SUPPORTS_ONLY_V2 = new API_VERSION[] { API_VERSION.V_2 };
  protected static final API_VERSION[] SUPPORTS_V1_V2 = new API_VERSION[] { API_VERSION.V_1, API_VERSION.V_2 };

  public API_VERSION[] supportedVersions() { return SUPPORTS_ONLY_V1; }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/RequestArguments.java
package water.api; import com.google.common.collect.Lists; import com.google.common.primitives.Ints; import dontweave.gson.JsonObject; import water.*; import water.Request2.TypeaheadKey; import water.api.Request.Filter; import water.api.Request.Validator; import water.fvec.Frame; import water.fvec.Vec; import water.util.Check; import water.util.RString; import water.util.Utils; import java.io.File; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.Arrays; import java.util.Properties; /** All arguments related classes are defined in this guy. * * Argument is the base class for all arguments, which then branches to * different still abstract subclasses that specify how are any given HTML input * elements being drawn. * * From these a proper Arguments that define particular value types are then * subclassed. * * When an argument is created, its pointer is stored in the _arguments array * list so that the request knows how many arguments and in which order it has. * * Because request objects and therefore also argument objects one per * application, while the codepath can be multithreaded (server decides this), * the argument state is not preserved in the argument itself, but in the * Record static object that is kept thread local and must be properly * initialized at each iteration by calling reset() method on the argument. * * See the respective classes for more details. * * NOTE add more arguments to this class as they are needed and keep them here. * * @author peta */ public class RequestArguments extends RequestStatics { // =========================================================================== // Helper functions // =========================================================================== /** Returns a json object containing all arguments specified to the page. * * Useful for redirects and polling. 
*/ protected JsonObject argumentsToJson() { JsonObject result = new JsonObject(); for (Argument a : _arguments) { if (a.specified()) result.addProperty(a._name,a.originalValue()); } return result; } protected static int frameColumnNameToIndex(Frame fr, String input, boolean namesOnly) { // first check if we have string match for (int i = 0; fr._names != null && i < fr._names.length; ++i) { String colName = fr._names[i]; if (colName == null) colName = String.valueOf(i); if (colName.equals(input)) return i; } try { if(!namesOnly) { int i = Integer.parseInt(input); if ((i<0) || (i>=fr.vecs().length)) return -1; return i; } } catch (NumberFormatException e) { } return -1; } // =========================================================================== // Record // =========================================================================== /** List of arguments for the request. Automatically filled in by the argument * constructors. */ protected transient ArrayList<Argument> _arguments = new ArrayList(); public ArrayList<Argument> arguments() { return _arguments; } // --------------------------------------------------------------------------- /** Argument state record. * * Contains all state required for the argument and a few functions to operate * on the state. */ protected static class Record<T> { /** Determines the original input value of the argument. null if the value * was not supplied, or was empty. Retains the original value even if the * argument value is wrong. */ public String _originalValue = null; /** Parsed value. If the parse was successful, or default value if the * checking failed, or the argument is not required and was missing. Note * that default value may very well be null and thus you cannot check this * for null of determine validity. */ public T _value = null; /** Reason why the argument is disabled, or null if it is enabled. A * disabled argument cannot be edited by the user yet. 
*/ public String _disabledReason = null; /** True if the argument's value stored in _value is valid, that is either * correctly parsed, or not present and default value used. Note that if * checking fails, the defaultValue is stored in _value, but _valid is * false. */ public boolean _valid = false; /** Returns true if the argument is disabled. */ public boolean disabled() { return _disabledReason != null; } /** Returns true if the argument is valid. */ public boolean valid() { return _valid; } /** Returns if the argument is specified by user. returns true only if it is * valid and parsing the argument was successful. * @return */ public boolean specified() { return valid() && _originalValue != null; } } // A string used to display the query element part of the argument protected static final String _queryHtml = "\n<dl class='dl-horizontal'>" + "<dt style='padding-top:3px'><span rel='tooltip' title='%TOOLTIP_DESCRIPTION' data-placement='left'>%ASTERISK %NAME</span></dt>" + "<dd>%ELEMENT %COMMENT</dd>" + "</dl>" ; // =========================================================================== // Argument // =========================================================================== public abstract class Argument<T> extends Iced implements Filter { @Override public boolean run(Object value) { throw new RuntimeException("Should not be called for special case Argument"); } /** As with request's _requestHelp, this provides the extended help that * will be displayed on the help and wiki pages. Specify this in the * particular request constructor. */ public String _requestHelp; /** True if the argument should not appear in the automatically generated * query. */ public boolean _hideInQuery = false; /** * True if the argument should be only read-only. */ public boolean _readOnly = false; /** * Can be a grid search parameter. */ public boolean _gridable; /** * For keys. If specified, the key must exist. */ public boolean _mustExist; /** Value validator. 
*/ public Validator<T> _validator; /** Override this method to provide parsing of the input string to the Java * expected value. The input is guaranteed to be non-empty when this method * is called and all prerequisities are guaranteed to be valid before this * method is called. */ protected abstract T parse(String input) throws IllegalArgumentException; /** Returns the default value of the argument. Note that the method will be * called also on required arguments, in which case it is ok return null. * * It is kept abstract because defining a proper default value might be * tricky and in many case you do not want it to be null. Overriding it * always makes you think:) */ protected abstract T defaultValue(); /** Returns the javascript code that will be executed when the query is * loaded that associates the given callback JS function with the on change * event of the input. This method is only called if the element should * refresh the webpage upon its change. */ protected abstract String jsRefresh(String callbackName); /** Returns the javascript code that will be executed when the value of * the argument is to be determined. It must contain a return statement, * that returns the string that should be sent back to the request for the * given arhument. */ protected abstract String jsValue(); /** If there is any additional javascript that should be dumped to the * query page, it should be defined here. Please follow chaining rules. */ protected String jsAddons() { return ""; } /** Returns the HTML elements of the argument query only. This should return * the elements in HTML that will be used to enter the value. For instance * the input text, selection, etc. */ protected abstract String queryElement(); /* A little bonus extra text out to the right */ protected String queryComment() { return ""; } /** Returns the query description. This is a concise description of a * correct value for the argument. generally used as a placeholder in the * html query elements. 
*/ protected abstract String queryDescription(); /** Returns a list of possible error strings, that could be thrown in an * IllegalArgumentException. */ protected String[] errors() { return null; } /** Any query addons can be specified here. These will be displayed with * the query html code and should be used for instance for default value * calculators, etc. */ protected String queryAddons() { return ""; } public String getName() { return _name; } public String getDisplayName() { return _displayName; } /** Returns the html query for the given argument, including the full * formatting. That means not only the queryElement, but also the argument * name in front of it, etc. * * You may want to override this if you want different form layouts to be * present. */ protected String query() { RString result = new RString(_queryHtml); result.replace("ID",_name); result.replace("NAME", _displayName != null ? _displayName : JSON2HTML(_name)); if (disabled()) result.replace("ELEMENT","<div class='alert alert-info' style='padding-top:4px;padding-bottom:4px;margin-bottom:5px'>"+record()._disabledReason+"</div>"); else result.replace("ELEMENT",queryElement()); result.replace("TOOLTIP_DESCRIPTION", queryDescription()); if (!disabled()) result.replace("COMMENT",queryComment()); if (_required) result.replace("ASTERISK","<span style='color:#ff0000'>* </span>"); return result.toString(); } /** Creates the request help page part for the given argument. Displays its * JSON name, query name (the one in HTML), value type and the request help * provided by the argument. */ public final JsonObject requestHelp() { JsonObject r = new JsonObject(); r.addProperty(NAME, _name); r.addProperty(DESCRIPTION, queryDescription()); r.addProperty(HELP, _requestHelp); return r; } /** Name of the argument. This must correspond to the name of the JSON * request argument. */ public String _name, _displayName; /** True if the argument is required, false if it may be skipped. 
*/ public boolean _required; /** True if change of the value in the query controls should trigger an * automatic refresh of the query form. * * This is set by the setrefreshOnChange() method. It is automatically set * for any controls that are prerequisites for other controls and can be * manually select for other controls by users (do it in the request * constructor). */ private boolean _refreshOnChange; /** List of all prerequisite arguments for the current argument. All the * prerequisite arguments must be created before the current argument. */ public transient ArrayList<Argument<T>> _prerequisites = null; /** The thread local argument state record. Must be initialized at the * beginning of each request before it can be used. */ private transient ThreadLocal<Record> _argumentRecord = new ThreadLocal(); /** * If argument has been created reflectively from a request field. */ public transient Field _field; /** Creates the argument of given name. Also specifies whether the argument * is required or not. This cannot be changed later. */ protected Argument(String name, boolean required) { assert Check.paramName(name); _name = name; _required = required; _refreshOnChange = false; _arguments.add(this); } /** Adds the given argument as a prerequisite. This means that current * argument will not be checked and/or reported in queries as a control form * unless all its prerequisite arguments are in a valid state. (the argument * will be disabled if not all its prerequisites are satisfied). */ protected final void addPrerequisite(Argument arg) { if (_prerequisites == null) _prerequisites = new ArrayList(); _prerequisites.add(arg); arg.setRefreshOnChange(); } /** Returns the thread local argument state record. */ protected final Record<T> record() { return _argumentRecord.get(); } /** Disables the argument with given reason. If the argument is already * disabled, its reason is overwritten by the new one. 
* * NOTE disable(null) effectively enables the argument, that is why the * assert! */ public final void disable(String reason) { assert (reason != null); record()._disabledReason = reason; } /** Disables the argument and makes its input value empty. This is the * preferred way of disabling arguments. */ public final void disable(String reason, Properties args) { assert (reason != null); disable(reason); args.remove(_name); } /** Returns whether the argument is disabled or not. */ public final boolean disabled() { return record().disabled(); } /** Makes the argument refresh the query page on its change automatically. * If you want this behavior to be disabled for the argument, overwrite this * method to error. */ public void setRefreshOnChange() { _refreshOnChange = true; } /** Returns true if the argument refreshes the query automatically on its * change. */ public boolean refreshOnChange() { return _refreshOnChange; } /** Returns true if the argument is valid. Valid means specified by user * and parsed properly, or not required and not specified. */ public final boolean valid() { // return record().valid(); return record() != null && record().valid(); } /** Returns true if the argument is specified by the user. That is if the * argument value was submitted by the user and parsed correctly. */ public final boolean specified() { return record() != null && record().specified(); } /** Returns the value of the argument. This is either the value parsed, if * specified, or defaultValue. Note that default value is returned also for * invalid arguments. */ public final T value() { return record()._value; } /** Returns the input value submitted by the user, if specified. */ public final String originalValue() { return record()._originalValue; } /** Resets the argument by creating it a new thread local state. Everything * is null and the argument is not valid. 
*/ public final void reset() { _argumentRecord.set(new Record()); } /** Checks that the argument supplied is correct. This method is called for * each argument and is given the HTTP supplied argument value. If the value * was not supplied, input contains an empty string. * * The argument must already be reseted before calling this method. * * If the argument is disabled, the function does not do anything except * setting the original value in the record. * * If the prerequisites of the argument are not all valid, then the argument * is disabled and function returns. * * Then the argument is parsed if provided, or an error thrown if the input * is empty and the argument is required. * * At the end of the function the value is either the result of a successful * parse() call or a defaultValue or null if the argument is disabled. * However if the argument is disabled a defaultValue should not be called. */ public void check(RequestQueries callInstance, String input) throws IllegalArgumentException { // get the record -- we assume we have been reset properly Record record = record(); // check that the input is canonical == value or null and store it to the // record if (input.isEmpty()) input = null; record._originalValue = input; // there is not much to do if we are disabled if (record.disabled()) { record._value = null; return; } // check that we have all prerequisites properly initialized if (_prerequisites != null) { for (Argument dep : _prerequisites) if (!dep.valid()) { record._disabledReason = "Not all prerequisite arguments have been supplied: "+dep._name; record._value = null; return; } } // if input is null, throw if required, otherwise use the default value if (input == null) { if (_required) throw new IllegalArgumentException("Argument '"+_name+"' is required, but not specified"); record._value = defaultValue(); record._valid = true; // parse the argument, if parse throws we will still be invalid correctly } else { try { record._value = parse(input); 
record._valid = true; if(callInstance instanceof Request2) ((Request2) callInstance).set(this, input, record._value); } catch( IllegalArgumentException e) { //record._value = defaultValue(); throw e; } } } } // =========================================================================== // InputText // =========================================================================== /** Argument that uses simple text input to define its value. * * This is the simplest argument. Uses the classic input element. All * functionality is supported. * * @param <T> */ public abstract class InputText<T> extends Argument<T> { public InputText(String name, boolean required) { super(name, required); } /** A query element is the default HTML form input. * * The id of the element is the name of the argument. Placeholder is the * query description and the value is filled in either as the value * submitted, or as the toString() method on defaultValue. */ @Override protected String queryElement() { // first determine the value to put in the field Record record = record(); String value = record._originalValue; // if no original value was supplied, try the one provided by the // default value if (value == null) { T v = defaultValue(); value = (v == null) ? "" : v.toString(); } if (_name == "path" || _name == "key" || _name == "source" || _name == "data_key" || _name == "source_key" || _name == "model_key" || _name == "thresholds" || _name == "model") { return "<input autocomplete=\"off\"" + (_readOnly ? " disabled" : "")+ " class='span5' type='text' name='"+_name+"' id='"+_name+"' placeholder='"+queryDescription()+"' "+ (!value.isEmpty() ? (" value='"+value+"' />") : "/>"); } else { return "<input " + (_readOnly ? " disabled" : "")+ " class='span5' type='text' name='"+_name+"' id='"+_name+"' placeholder='"+queryDescription()+"' "+ (!value.isEmpty() ? (" value='"+value+"' />") : "/>"); } } /** JS refresh is a default jQuery hook to the change() method. 
*/ @Override protected String jsRefresh(String callbackName) { return "$('#"+_name+"').change('"+_name+"',"+callbackName+");"; } /** JS value is the simple jQuery val() method. */ @Override protected String jsValue() { return "return $('#"+_name+"').val();"; } } // =========================================================================== // TypeaheadInputText // =========================================================================== /** Typeahead enabled text input. * * Typeahead is enabled using the jQuery typeahead plugin. You must specify * the JSON request which provides the typeahead, and the data name in the * response that contains the array of strings corresponding to the typeahead * options. Optionally you can specify the typeahead limit (how many options * will be displayed), which is 1024 by default. * * The typeahead json request must take Str argument filter and Int optional * argument limit. */ public abstract class TypeaheadInputText<T> extends InputText<T> { /** href of the json request supplying the typeahead values. */ protected final String _typeaheadHref; /** Typeahead limit. If more than this limit options will be available, the * typeahead will be disabled. */ protected final int _typeaheadLimit; /** Creates the typeahead. */ protected TypeaheadInputText(Class<? extends TypeaheadRequest> href, String name, boolean required) { super(name, required); _typeaheadHref = href.getSimpleName(); _typeaheadLimit = 1024; } /** Adds the json to hook initialize the typeahead functionality. It is * jQuery typeahead plugin standard initialization with async filler. 
*/ @Override protected String jsAddons() { RString s = new RString("" + "$('#%ID').typeahead({\n" + " source:\n" + " function(query,process) {\n" + " return $.get('%HREF', { filter: query, limit: %LIMIT }, function (data) {\n" + " return process(data.%DATA_NAME);\n" + " });\n" + " },\n" + "});\n" + "\n"); s.replace("ID", _name); s.replace("HREF", _typeaheadHref); s.replace("LIMIT", _typeaheadLimit); s.replace("DATA_NAME", ITEMS); return super.jsAddons()+s.toString(); } } // =========================================================================== // InputCheckBox // =========================================================================== /** A boolean argument that is represented as the checkbox. * * The only allowed values for a boolean checkbox are "0" for false, "1" for * true. If the argument is not required, then default value will be used. * * Please note that due to the nature of a checkbox, the html query will * always specify this argument to its default value, or to false if the user * did not specify it explicitly. */ public abstract class InputCheckBox extends Argument<Boolean> { /** Default value. */ public final transient Boolean _defaultValue; /** Creates the argument with specified default value. */ public InputCheckBox(String name, boolean defaultValue) { super(name, false); // checkbox is never required _defaultValue = defaultValue; } /** Creates the argument as required one. This has only effect on JSON, for * HTML it means the default value is false effectively. */ public InputCheckBox(String name) { super(name, true); _defaultValue = null; } /** Parses the value. 1 to true and 0 to false. Anything else is an error. */ @Override public Boolean parse(String input) { if (input.equals("1")) return true; if (input.equals("0")) return false; if (input.equals("true")) return true; if (input.equals("false")) return false; throw new H2OIllegalArgumentException(this, input+" is not valid boolean value. 
Only 1 and 0 are allowed."); } /** Displays the query element. This is just the checkbox followed by the * description. */ @Override protected String queryElement() { // first determine the value to put in the field Record record = record(); String value = record._originalValue; // if no original value was supplied, use the provided one if (value == null) { Boolean v = defaultValue(); value = ((v == null) || (v == false)) ? "" : "1" ; } return "<input value='1' class='span5' type='checkbox' name='"+_name+"' id='"+_name+"' "+ (value.equals("1") ? (" checked />") : "/>")+"&nbsp;&nbsp;"+queryDescription(); } /** Refresh only taps to jQuery change event. */ @Override protected String jsRefresh(String callbackName) { return "$('#"+_name+"').change('"+_name+"',"+callbackName+");"; } /** Returns 1 if the checkbox is checked and 0 otherwise. */ @Override protected String jsValue() { return "return $('#"+_name+"').is(':checked') ? '1' : '0';"; } /** Returns the default value. */ @Override protected Boolean defaultValue() { return _defaultValue; } } // =========================================================================== // InputSelect // =========================================================================== /** Select element from the list of options. * * Array of values and arrays of names can be specified together with the * selected element's value. */ public abstract class InputSelect<T> extends Argument<T> { /** Override this method to provide the values for the options. These will * be the possible values returned by the form's input and should be the * possible values for the JSON argument. */ protected abstract String[] selectValues(); /** Returns which value should be selected. This is *not* the default value * itself, as the default values may be of any type, but the input value * that should be selected in the browser. 
*/ protected abstract String selectedItemValue(); /** Override this method to determine the value names, that is the names * displayed in the browser. Return null, if the value strings should be * used (this is default behavior). */ protected String[] selectNames() { return null; } /** Constructor just calls super. */ public InputSelect(String name, boolean required) { super(name, required); } /** Displays the query element. It is a select tag with option tags inside. * If the argument is required then additional empty value is added with * name "Please select..." that ensures that the user selects actual value. */ @Override protected String queryElement() { StringBuilder sb = new StringBuilder(); sb.append("<select id='"+_name+"' name='"+_name+"'>"); String selected = selectedItemValue(); String[] values = selectValues(); String[] names = selectNames(); if (names == null) names = values; assert (values.length == names.length); if (_required) sb.append("<option value=''>Please select...</option>"); for (int i = 0 ; i < values.length; ++i) { if (values[i].equals(selected)) sb.append("<option value='"+values[i]+"' selected>"+names[i]+"</option>"); else sb.append("<option value='"+values[i]+"'>"+names[i]+"</option>"); } sb.append("</select>"); return sb.toString(); } /** Refresh is supported using standard jQuery change event. */ @Override protected String jsRefresh(String callbackName) { return "$('#"+_name+"').change('"+_name+"',"+callbackName+");"; } /** Get value is supported by the standard val() jQuery function. */ @Override protected String jsValue() { return "return $('#"+_name+"').val();"; } } // =========================================================================== // MultipleCheckbox // =========================================================================== /** Displays multiple checkboxes for different values. Returns a list of the * checked values separated by commas. 
*/ public abstract class MultipleSelect<T> extends Argument<T> { /** Override this method to provide the values for the options. These will * be the possible values returned by the form's input and should be the * possible values for the JSON argument. */ protected abstract String[] selectValues(); /** Returns true if the given option (by its value) is selected. False * otherwise. */ protected abstract boolean isSelected(String value); /** Override this method to determine the value names, that is the names * displayed in the browser. Return null, if the value strings should be * used (this is default behavior). */ protected String[] selectNames() { return null; } /** Constructor just calls super. Is never required, translates to the * default value. */ public MultipleSelect(String name) { super(name, false); } /** Displays the query element. It is a tabled list of all possibilities * with an optional scrollbar on the right. */ @Override protected String queryElement() { String[] values = selectValues(); String[] names = selectNames(); if (names == null) names = values; assert (values.length == names.length); if (values.length == 0) return "<div class='alert alert-error'>No editable controls under current setup</div>"; StringBuilder sb = new StringBuilder(); sb.append("<select multiple"); sb.append(" size='").append(Math.min(20, values.length)).append("'"); sb.append(" id='").append(_name).append("' >"); for (int i = 0 ; i < values.length; ++i) { sb.append("<option value='").append(values[i]).append("' "); if( isSelected(values[i]) ) sb.append("selected='true' "); sb.append(">").append(names[i]).append("</option>"); } sb.append("</select>"); return sb.toString(); } /** Refresh is supported using standard jQuery change event. Each * possibility's checkbox is instrumented. 
*/ @Override protected String jsRefresh(String callbackName) { return "$('#"+_name+"').change('"+_name+"',"+callbackName+");"; } /** Get value is supported by a JS function that enumerates over the * possibilities. If checked, the value of the possibility is appended to * a comma separated list. */ @Override protected String jsValue() { return "var tmp = $('#"+_name+"').val(); return tmp == null ? \"\" : tmp.join(',');"; } } // =========================================================================== // MultipleText // =========================================================================== private static final char JS_SEP = '='; private static final String _multipleTextValueJS = " var str = ''\n" + " for (var i = 0; i < %NUMITEMS; ++i) {\n" + " var element = $('#%NAME'+i);\n" + " if (element.val() != '') {\n" + " if (str == '')\n" + " str = element.attr('name') + '" + JS_SEP + "' +element.val();\n" + " else\n" + " str = str + ',' + element.attr('name') + '" + JS_SEP + "' + element.val();\n" + " }\n" + " }\n" + " return str;\n" ; public abstract class MultipleText<T> extends Argument<T> { protected abstract String[] textValues(); protected abstract String[] textNames(); protected String[] textPrefixes() { return null; } protected String[] textSuffixes() { return null; } protected String textSuffix() { return null; } public MultipleText(String name, boolean required) { super(name, required); } /** Displays the query element. It is a tabled list of all possibilities * with an optional scrollbar on the right. 
*/ @Override protected String queryElement() { StringBuilder sb = new StringBuilder(); sb.append("<div style='max-height:300px;overflow:auto'>"); String[] prefixes = textPrefixes(); String[] values = textValues(); String[] names = textNames(); String[] suffixes = textSuffixes(); if (prefixes == null) prefixes = names; if (suffixes == null && textSuffix() != null) { suffixes = new String[names.length]; String suffix = textSuffix(); for(int i = 0; i<names.length; i++) suffixes[i] = suffix; } if (values == null) { values = new String[prefixes.length]; for (int i = 0; i < values.length; ++i) values[i] = ""; } assert (prefixes.length == values.length); if (values.length == 0) sb.append("<div class='alert alert-error'>No editable controls under current setup</div>"); for (int i = 0 ; i < values.length; ++i) { sb.append("<div class='input-prepend" + (suffixes!=null?" input-append":"") + "'>"); sb.append("<span class='add-on'>" + prefixes[i]+"</span>"); sb.append("<input autocomplete=\"off\" class='span3' name='"+names[i]+"' id='"+_name+String.valueOf(i)+"' type='text' value='"+values[i]+"' placeholder='"+queryDescription()+"'>"); if (suffixes!=null) sb.append("<span class='add-on'>" + suffixes[i]+"</span>"); sb.append("</div>"); } sb.append("</div>"); return sb.toString(); } /** Refresh is supported using standard jQuery change event. Each text * input is instrumented. */ @Override protected String jsRefresh(String callbackName) { int size = textNames().length; StringBuilder sb = new StringBuilder(); for (int i = 0; i < size; ++i) sb.append("$('#"+_name+String.valueOf(i)+"').change('"+_name+"',"+callbackName+");\n"); return sb.toString(); } /** Get value is supported by a JS function that enumerates over the * possibilities. If checked, the value of the possibility is appended to * a comma separated list. 
*/ @Override protected String jsValue() { int size = textNames().length; RString result = new RString(_multipleTextValueJS); result.replace("NUMITEMS",size); result.replace("NAME",_name); return result.toString(); } } // =========================================================================== // UserDefinedArguments // // Place your used defined arguments here. // // =========================================================================== // --------------------------------------------------------------------------- // Str // --------------------------------------------------------------------------- /** A string value. * * Any string can be a proper value. If required, empty string is not allowed. */ public class Str extends InputText<String> { public final String _defaultValue; public Str(String name) { super(name,true); _defaultValue = null; } public Str(String name, String defaultValue) { super(name, false); _defaultValue = defaultValue; } @Override protected String parse(String input) throws IllegalArgumentException { return input; } @Override protected String defaultValue() { return _defaultValue; } @Override protected String queryDescription() { return _required ? 
"Any non-empty string" : "Any string"; } } public static class NumberSequence { public final double [] _arr; final String _str; final boolean _ints; public NumberSequence(double [] val, String str, boolean ints) { _arr = val; _str = str; _ints = ints; } public NumberSequence(String str, boolean mul, double defaultStep) { this(parseArray(str,mul,defaultStep),str, false); } static double [] parseArray(String input, boolean mul, double defaultStep) { String str = input.trim().toLowerCase(); if(str.startsWith("c(") && str.endsWith(")")) str = str.substring(2,str.length()-1); if( str.startsWith("seq") ) { throw new RuntimeException("unimplemented"); } if( str.contains(":") ) return parseGenerator(input, mul, defaultStep); else if( str.contains(",") ) { String [] parts = str.split(","); double [] res = new double[parts.length]; for(int i = 0; i < parts.length; ++i) res[i] = Double.parseDouble(parts[i]); return res; } else { return new double [] {Double.parseDouble(str)}; } } public static double[] parseGenerator(String input, boolean mul, double defaultStep) { String str = input.trim().toLowerCase(); String [] parts = str.split(":"); if(parts.length != 2 && parts.length != 3 )throw new IllegalArgumentException("Value "+input+" is not a valid number sequence."); double step = defaultStep; if( parts.length == 3 ){ step = Double.parseDouble(parts[2]); } double from = Double.parseDouble(parts[0]); double to = Double.parseDouble(parts[1]); if(to == from) return new double[]{from}; if(to < from)throw new IllegalArgumentException("Value "+input+" is not a valid number sequence."); if(mul?(step <= 1):(step<=0))throw new IllegalArgumentException("Value "+input+" is not a valid number sequence."); // make sure we have format from < to double [] res = new double[1024]; int i = 0; while(from <= to){ res[i++] = from; if(i == res.length)res = Arrays.copyOf(res, res.length + Math.max(1, res.length >> 1)); if( mul) from *= step; else from += step; } return Arrays.copyOf(res,i); } static 
NumberSequence parse(String input, boolean mul, double defaultStep) { return new NumberSequence(parseArray(input, mul, defaultStep),null, false); } @Override public String toString() { if(_str != null)return _str; if(_arr == null || _arr.length == 0)return""; StringBuilder res = new StringBuilder(); for(int i = 0; i < _arr.length; ++i) { if(i > 0) res.append(","); res.append(_ints ? "" + (int) _arr[i] : _arr[i]); } return res.toString(); } } public static class NumberSequenceFloat { public final float [] _arr; final String _str; final boolean _ints; public NumberSequenceFloat(float [] val, String str, boolean ints) { _arr = val; _str = str; _ints = ints; } public NumberSequenceFloat(String str, boolean mul, float defaultStep) { this(parseArray(str,mul,defaultStep),str, false); } static float [] parseArray(String input, boolean mul, float defaultStep) { String str = input.trim().toLowerCase(); if(str.startsWith("c(") && str.endsWith(")")) str = str.substring(2,str.length()-1); if( str.startsWith("seq") ) { throw new RuntimeException("unimplemented"); } if( str.contains(":") ) return parseGenerator(input, mul, defaultStep); else if( str.contains(",") ) { String [] parts = str.split(","); float [] res = new float[parts.length]; for(int i = 0; i < parts.length; ++i) res[i] = Float.parseFloat(parts[i]); return res; } else { return new float [] {Float.parseFloat(str)}; } } public static float[] parseGenerator(String input, boolean mul, float defaultStep) { String str = input.trim().toLowerCase(); String [] parts = str.split(":"); if(parts.length != 2 && parts.length != 3 )throw new IllegalArgumentException("Value "+input+" is not a valid number sequence."); float step = defaultStep; if( parts.length == 3 ){ step = Float.parseFloat(parts[2]); } float from = Float.parseFloat(parts[0]); float to = Float.parseFloat(parts[1]); if(to == from) return new float[]{from}; if(to < from)throw new IllegalArgumentException("Value "+input+" is not a valid number sequence."); 
if(mul?(step <= 1):(step<=0))throw new IllegalArgumentException("Value "+input+" is not a valid number sequence."); // make sure we have format from < to float [] res = new float[1024]; int i = 0; while(from <= to){ res[i++] = from; if(i == res.length)res = Arrays.copyOf(res, res.length + Math.max(1, res.length >> 1)); if( mul) from *= step; else from += step; } return Arrays.copyOf(res,i); } static NumberSequenceFloat parse(String input, boolean mul, float defaultStep) { return new NumberSequenceFloat(parseArray(input, mul, defaultStep),null, false); } @Override public String toString() { if(_str != null)return _str; if(_arr == null || _arr.length == 0)return""; StringBuilder res = new StringBuilder(); for(int i = 0; i < _arr.length; ++i) { if(i > 0) res.append(","); res.append(_ints ? "" + (int) _arr[i] : _arr[i]); } return res.toString(); } } public class RSeq extends InputText<NumberSequence> { boolean _multiplicative; transient NumberSequence _dVal; double _defaultStep; String _comment; @Override public String queryComment() { if( disabled() ) return ""; if( _comment != null ) return _comment; return "Comma separated list of values. 
Or range specified as from:to:step" + (_multiplicative?"(*).":"(+)."); } public RSeq(String name, boolean req, boolean mul){ this(name,req,null,mul); } public RSeq(String seq, boolean mul){ this("", false, new NumberSequence(seq, mul, 0), mul); } public RSeq(String name, boolean req, NumberSequence dVal, boolean mul){ this(name, req, dVal, mul, null); } public RSeq(String name, boolean req, NumberSequence dVal, boolean mul, String comment){ super(name,req); _dVal = dVal; _multiplicative = mul; _defaultStep = mul?10:1; _comment = comment; } @Override protected NumberSequence parse(String input) throws IllegalArgumentException { try { return NumberSequence.parse(input, _multiplicative, _defaultStep); } catch( NumberFormatException e) { // allow grid search number sequences to pass without an exception (if all numbers except for [(),] are valid) if (input.contains("(") && input.contains(")")) { try { String[] s = input.replaceAll("[()]", "").split(","); //remove ( and ) and split on , for (String num : s) Double.parseDouble(num); //try to parse every number as Double return NumberSequence.parse(s[0], _multiplicative, _defaultStep); //HACK: report back the first number (to satisfy the UI) } catch (NumberFormatException e2) { throw new IllegalArgumentException("Value " + input + " is not a valid number sequence."); } } else throw new IllegalArgumentException("Value " + input + " is not a valid number sequence."); } } @Override protected NumberSequence defaultValue() { return _dVal; } @Override protected String queryDescription() { return "Number sequence. Comma separated list of values. Or range specified as from:to:step."; } } public class RSeqFloat extends InputText<NumberSequenceFloat> { boolean _multiplicative; transient NumberSequenceFloat _fVal; float _defaultStep; String _comment; @Override public String queryComment() { if( disabled() ) return ""; if( _comment != null ) return _comment; return "Comma separated list of values. 
Or range specified as from:to:step" + (_multiplicative?"(*).":"(+)."); } public RSeqFloat(String name, boolean req, boolean mul){ this(name,req,null,mul); } public RSeqFloat(String seq, boolean mul){ this("", false, new NumberSequenceFloat(seq, mul, 0), mul); } public RSeqFloat(String name, boolean req, NumberSequenceFloat fVal, boolean mul){ this(name, req, fVal, mul, null); } public RSeqFloat(String name, boolean req, NumberSequenceFloat fVal, boolean mul, String comment){ super(name,req); _fVal = fVal; _multiplicative = mul; _defaultStep = mul?10:1; _comment = comment; } @Override protected NumberSequenceFloat parse(String input) throws IllegalArgumentException { try { return NumberSequenceFloat.parse(input, _multiplicative, _defaultStep); } catch( NumberFormatException e) { throw new IllegalArgumentException("Value "+input+" is not a valid number sequence."); } } @Override protected NumberSequenceFloat defaultValue() { return _fVal; } @Override protected String queryDescription() { return "Number sequence. Comma separated list of values. 
Or range specified as from:to:step."; } } // --------------------------------------------------------------------------- // Int // --------------------------------------------------------------------------- public class Int extends InputText<Integer> { public final transient Integer _defaultValue; public final int _min; public final int _max; public Int(String name) { this(name, Integer.MIN_VALUE, Integer.MAX_VALUE); } public Int(String name, int min, int max) { super(name,true); _defaultValue = null; _min = min; _max = max; } public Int(String name, Integer defaultValue) { this(name, defaultValue, Integer.MIN_VALUE, Integer.MAX_VALUE); } public Int(String name, Integer defaultValue, int min, int max) { super(name,false); _defaultValue = defaultValue; _min = min; _max = max; } @Override protected Integer parse(String input) throws IllegalArgumentException { try { int i = Integer.parseInt(input); if ((i< _min) || (i > _max)) throw new H2OIllegalArgumentException(this, "Value "+i+" is not between "+_min+" and "+_max+" (inclusive)"); return i; } catch( NumberFormatException e ) { throw new H2OIllegalArgumentException(this, "Value "+input+" is not a valid integer."); } } @Override protected Integer defaultValue() { return _defaultValue; } @Override protected String queryDescription() { return ((_min == Integer.MIN_VALUE) && (_max == Integer.MAX_VALUE)) ? 
"Integer value" : "Integer from "+_min+" to "+_max; } } // --------------------------------------------------------------------------- // LongInt // --------------------------------------------------------------------------- public class LongInt extends InputText<Long> { public final transient long _defaultValue; public final long _min; public final long _max; public final String _comment; public LongInt(String name, long min, long max) { this(name,false,0,min,max,""); } public LongInt(String name, long defaultValue, String comment) { this(name, false, defaultValue, Long.MIN_VALUE, Long.MAX_VALUE, comment); } public LongInt(String name, boolean req, long defaultValue, long min, long max, String comment) { super(name, req); _defaultValue = defaultValue; _min = min; _max = max; _comment = comment; } @Override protected Long parse(String input) throws IllegalArgumentException { long i = 0; try { i = Long.parseLong(input); } catch( NumberFormatException e ) { double d = Double.NaN; try { d = Double.parseDouble(input); i = (long)d; } catch ( NumberFormatException xe ) { d = i - 1; } // make d different from i if( i!=d ) throw new H2OIllegalArgumentException(this, "Value '"+input+"' is not a valid long integer."); } if ((i< _min) || (i > _max)) throw new H2OIllegalArgumentException(this, "Value "+i+" is not between "+_min+" and "+_max+" (inclusive)"); return i; } @Override protected Long defaultValue() { return _defaultValue; } @Override protected String queryComment() { return _comment; } @Override protected String queryDescription() { return ((_min == Long.MIN_VALUE) && (_max == Long.MAX_VALUE)) ? 
"Integer value" : "Integer from "+_min+" to "+_max; } } // --------------------------------------------------------------------------- // Real // --------------------------------------------------------------------------- public class Real extends InputText<Double> { public transient final Double _defaultValue; public double _min; public double _max; public final String _comment; public Real(String name) { this(name, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY); } public Real(String name, double min, double max) { super(name,true); _defaultValue = null; _min = min; _max = max; _comment = ""; } public Real(String name, Double defaultValue) { this(name, false, defaultValue, null, null, ""); } public Real(String name, Double defaultValue, String comment) { this(name, false, defaultValue, null, null, comment); } public Real(String name, Double defaultValue, double min, double max, String comment) { this(name, false, defaultValue, min, max, comment); } public Real(String name, boolean req, Double defaultValue, Double min, Double max, String comment) { super(name,req); _defaultValue = defaultValue; _min = min != null ? min : Double.NEGATIVE_INFINITY; _max = max != null ? max : Double.POSITIVE_INFINITY; _comment = comment; } @Override protected Double parse(String input) throws IllegalArgumentException { try { double i = Double.parseDouble(input); if ((i< _min) || (i > _max)) throw new H2OIllegalArgumentException(this, "Value "+i+" is not between "+_min+" and "+_max+" (inclusive)"); return i; } catch( NumberFormatException e ) { throw new H2OIllegalArgumentException(this, "Value "+input+" is not a valid real number."); } } @Override protected Double defaultValue() { return _defaultValue; } @Override protected String queryComment() { return _comment; } @Override protected String queryDescription() { return ((_min == Double.NEGATIVE_INFINITY) && (_max == Double.POSITIVE_INFINITY)) ? 
"Real value" : "Real from "+_min+" to "+_max; } } // --------------------------------------------------------------------------- // Bool // --------------------------------------------------------------------------- public class Bool extends InputCheckBox { public final String _description; public Bool(String name, boolean defaultValue, String description) { super(name, defaultValue); _description = description; } public void setValue(boolean b){ record()._value = b; record()._valid = true; record()._originalValue = b?"1":"0"; } @Override protected String queryDescription() { return _description; } } public class ClassifyBool extends Bool { private FrameClassVec _fcv; public ClassifyBool(String name, FrameClassVec fcv) { super(name,false,"Classification or Regression"); addPrerequisite(_fcv=fcv); setRefreshOnChange(); } @Override public Boolean parse(String input) { boolean b=false; if( false ) ; else if (input.equals("1")) b= true; else if (input.equals("0")) b= false; else if (input.equals("true")) b= true; else if (input.equals("false")) b= false; else throw new H2OIllegalArgumentException(this, input+" is not valid boolean value. Only 1 and 0 are allowed."); Vec vec = _fcv.value(); if( !vec.isInt() && b ) throw new H2OIllegalArgumentException(this, "Float response allows only regression!"); if( vec.isEnum() && !b ) throw new H2OIllegalArgumentException(this, "Categorical response allows only classification!"); return b; } @Override protected Boolean defaultValue() { return _fcv.value().isInt(); // Allows only float columns for regression } } public class DRFCopyDataBool extends Bool { private TypeaheadKey _frkey; public DRFCopyDataBool(String name, TypeaheadKey frkey) { super(name,false,"Run on one node only; no network overhead but fewer cpus used. 
Suitable for small datasets."); addPrerequisite(_frkey=frkey); setRefreshOnChange(); } protected Frame fr() { return DKV.get(_frkey.value()).get(); } @Override public Boolean parse(String input) { boolean b=false; if( false ) ; else if (input.equals("1")) b= true; else if (input.equals("0")) b= false; else if (input.equals("true")) b= true; else if (input.equals("false")) b= false; else throw new H2OIllegalArgumentException(this, input+" is not valid boolean value. Only 1 and 0 are allowed."); return b; } @Override protected Boolean defaultValue() { // Can we allocate ALL of the dataset locally? long bs = fr().byteSize(); if( !MemoryManager.tryReserveTaskMem(bs) ) return false; // Also, do we have enough chunks to run it well globally? if( fr().anyVec().nChunks() >= 2*H2O.CLOUD.size() ) return false; // Less than 2 chunks per node, and fits locally... default to local-only return true; } } // --------------------------------------------------------------------------- // EnumClass // --------------------------------------------------------------------------- public class EnumArgument<T extends Enum<T>> extends InputSelect<T> { protected transient final Class<T> _enumClass; private transient final T _defaultValue; public EnumArgument(T defaultValue) { this("", defaultValue, false); } public EnumArgument(String name, T defaultValue, boolean refreshOnChange) { this(name,defaultValue); if(refreshOnChange)setRefreshOnChange(); } public EnumArgument(String name, T defaultValue) { super(name, false); _defaultValue = defaultValue; _enumClass = (Class<T>) defaultValue.getClass(); } public EnumArgument(String name, Class enumClass) { super(name, true); _defaultValue = null; _enumClass = enumClass; } @Override protected String[] selectValues() { T[] _enums = _enumClass.getEnumConstants(); String[] result = new String[_enums.length]; for (int i = 0; i < _enums.length; ++i) result[i] = _enums[i].toString(); return result; } @Override protected String selectedItemValue() { T v = 
value(); if (v == null) return ""; return v.toString(); } @Override protected T parse(String input) throws IllegalArgumentException { for (T v : _enumClass.getEnumConstants()) if (v.toString().equals(input)) return v; throw new H2OIllegalArgumentException(this, "Only "+Arrays.toString(selectValues())+" accepted for argument "+_name); } @Override protected T defaultValue() { return _defaultValue; } @Override protected String queryDescription() { return "Any of "+Arrays.toString(selectValues()); } } // --------------------------------------------------------------------------- // ExistingFile // --------------------------------------------------------------------------- public class ExistingFile extends TypeaheadInputText<File> { public ExistingFile(String name) { super(TypeaheadFileRequest.class, name, true); } @Override protected File parse(String input) throws IllegalArgumentException { File f = new File(input); if( !f.exists() ) throw new H2OIllegalArgumentException(this, "File "+input+" not found"); return f; } @Override protected String queryDescription() { return "Existing file or directory"; } @Override protected File defaultValue() { return null; } @Override protected String[] errors() { return new String[] { "File not found" }; } } public class GeneralFile extends TypeaheadInputText<String> { public GeneralFile() {this("");} public GeneralFile(String name) { super(TypeaheadFileRequest.class, name, true); } @Override protected String parse(String input) throws IllegalArgumentException { return input; } @Override protected String queryDescription() { return "File or directory, can be on NFS, HDFS or S3"; } @Override protected String defaultValue() { return ""; } @Override protected String[] errors() { return new String[] { "File not found" }; } } // --------------------------------------------------------------------------- // H2OKey // --------------------------------------------------------------------------- // key with autocompletion and autoconversion to 
frame public class H2OKey2 extends TypeaheadInputText<Key> { public final Key _defaultValue; public H2OKey2(String name, boolean required) { this(name,null,required); } public H2OKey2(String name, Key key) { this(name,key,false); } public H2OKey2(String name, Key key, boolean req) { super(TypeaheadKeysRequest.class,name, req); _defaultValue = key; } @Override protected Key parse(String input) { return Key.make(input); } @Override protected Key defaultValue() { return _defaultValue; } @Override protected String queryDescription() { return "Valid H2O key"; } } public class H2OKey extends InputText<Key> { public final Key _defaultValue; private final boolean _checkLegal; public H2OKey(String name, boolean required) { this(name,null,required); } public H2OKey(String name, boolean required, boolean checkLegal) { this(name,null,required,checkLegal); } public H2OKey(String name, Key key) { this(name,key,false); } public H2OKey(String name, Key key, boolean req) { super(name, req); _defaultValue = key; _checkLegal = false; } public H2OKey(String name, Key key, boolean req, boolean checkLegal) { super(name, req); _defaultValue = key; _checkLegal = checkLegal; } @Override protected Key parse(String input) { if (_checkLegal && Utils.contains(input, Key.ILLEGAL_USER_KEY_CHARS)) throw new IllegalArgumentException("Key '" + input + "' contains illegal character! 
Please avoid these characters: " + Key.ILLEGAL_USER_KEY_CHARS); return Key.make(input); } @Override protected Key defaultValue() { return _defaultValue; } @Override protected String queryDescription() { return "Valid H2O key"; } } // --------------------------------------------------------------------------- // H2OExistingKey // --------------------------------------------------------------------------- public class H2OExistingKey extends TypeaheadInputText<Value> { public final Key _defaultValue; public H2OExistingKey(String name) { this(name,true); } public H2OExistingKey(String name,boolean required) { super(TypeaheadKeysRequest.class, name, required); setRefreshOnChange(); _defaultValue = null; } public H2OExistingKey(String name, String keyName) { this(name, Key.make(keyName)); } public void setValue(Value v){ record()._value = v; record()._originalValue = v._key.toString(); } public H2OExistingKey(String name, Key key) { super(TypeaheadKeysRequest.class, name, false); _defaultValue = key; } @Override protected Value parse(String input) throws IllegalArgumentException { Key k = Key.make(input); Value v = DKV.get(k); if (v == null) throw new H2OIllegalArgumentException(this, "Key "+input+" not found!"); return v; } @Override protected Value defaultValue() { if (_defaultValue == null) return null; return DKV.get(_defaultValue); } @Override protected String queryDescription() { return "An existing H2O key"; } } // --------------------------------------------------------------------------- // StringListArgument // --------------------------------------------------------------------------- // NO EMPTY string in values public class StringList extends InputSelect<String> { public final String[] _values; public final int _defaultIndex; public StringList(String name, String[] values) { super(name, true); _values = values; _defaultIndex = -1; } public StringList(String name, String[] values, int defaultIndex) { super(name, false); _values = values; _defaultIndex = 
defaultIndex; } @Override protected String[] selectValues() { return _values; } @Override protected String selectedItemValue() { if (_required && (!valid())) return ""; return value(); } @Override protected String parse(String input) throws IllegalArgumentException { for (String s : _values) if (s.equals(input)) return input; throw new H2OIllegalArgumentException(this, "Invalid value "+input+", only "+Arrays.toString(_values)+" allowed"); } @Override protected String defaultValue() { if (_defaultIndex == -1) return null; return _values[_defaultIndex]; } @Override protected String queryDescription() { return "Any of "+Arrays.toString(_values); } } // --------------------------------------------------------------------------- // Fluid Vec Arguments // --------------------------------------------------------------------------- /** A Fluid Vec, via a column name in a Frame */ public class FrameKeyVec extends InputSelect<Vec> { final TypeaheadKey _key; boolean _optional = false; final String _desc; protected transient ThreadLocal<Integer> _colIdx= new ThreadLocal(); public FrameKeyVec(String name, TypeaheadKey key, String desc,boolean required) { super(name, required); addPrerequisite(_key=key); setRefreshOnChange(); _desc = desc; } protected Frame fr() { return DKV.get(_key.value()).get(); } @Override protected String[] selectValues() { String [] vals = fr()._names; if(!_required) vals = Utils.append(new String[]{""},vals); return vals; } @Override protected String selectedItemValue() { Frame fr = fr(); if( value() == null || fr == null ) { if(!refreshOnChange()) { // Not if has dependencies, or page doesn't refresh Vec defaultVec = defaultValue(); if( defaultVec != null && fr != null ) for( int i = 0; i < fr.vecs().length; i++ ) if( fr.vecs()[i] == defaultVec ) return fr._names[i]; } return ""; } if(_colIdx.get() == null) return ""; return fr._names[_colIdx.get()]; } @Override protected Vec parse(String input) throws IllegalArgumentException { int cidx = 
fr().find(input); if (cidx == -1) { try { cidx = Integer.parseInt(input); } catch( NumberFormatException e ) { cidx = -1; } if (cidx < 0 || cidx >= fr().numCols() ) throw new H2OIllegalArgumentException(this, input+" not a name of column, or a column index"); } _colIdx.set(cidx); return fr().vecs()[cidx]; } @Override protected Vec defaultValue() { return null; } @Override protected String queryDescription() { return _desc; } @Override protected String[] errors() { return new String[] { "Not a name of column, or a column index" }; } } /** A Class Vec/Column within a Frame. Limited to 1000 classes, just to prevent madness. */ public class FrameClassVec extends FrameKeyVec { public FrameClassVec(String name, TypeaheadKey key ) { super(name, key,"response column name",true); } @Override protected String[] selectValues() { final Vec [] vecs = fr().vecs(); String[] names = new String[vecs.length]; int j = 0; for( int i = 0; i < vecs.length; ++i) { if( !vecs[i].isUUID() ) // No math on strings or UUIDs names[j++] = fr()._names[i]; } return Arrays.copyOf(names, j); } @Override protected Vec defaultValue() { Frame fr = fr(); return fr != null ? 
fr.vecs()[fr.vecs().length - 1] : null; } } public class FrameKeyMultiVec extends MultipleSelect<int[]> { final TypeaheadKey _key; final String _description; final boolean _namesOnly; final boolean _filterNAs; transient ArrayList<FrameKeyVec> _ignoredVecs = new ArrayList<FrameKeyVec>(); protected transient ThreadLocal<Integer> _colIdx= new ThreadLocal(); protected Frame fr() { Value v = DKV.get(_key.value()); if(v == null) throw new H2OIllegalArgumentException(this, "Frame not found"); return v.get(); } public FrameKeyMultiVec(String name, TypeaheadKey key, FrameClassVec vec, String description, boolean namesOnly, boolean filterNAs) { super(name); addPrerequisite(_key = key); _description = description; _namesOnly = namesOnly; _filterNAs = filterNAs; if(vec != null) ignoreVec(vec); } public void ignoreVec(FrameKeyVec v) { for(FrameKeyVec vv:_ignoredVecs) if(vv == v)return; addPrerequisite(v); _ignoredVecs.add(v); } public boolean shouldIgnore(int i, Frame fr ) { if(fr.vecs()[i].isUUID()) return true; for(FrameKeyVec v:_ignoredVecs) if(v.value() == fr.vecs()[i]) return true; return false; } public void checkLegality(Vec v) throws IllegalArgumentException { } transient ArrayList<Integer> _selectedCols; // All the columns I'm willing to show the user @Override protected String queryElement() { Frame fr = fr(); ArrayList<Integer> cols = Lists.newArrayList(); for (int i = 0; i < fr.numCols(); ++i) if( !shouldIgnore(i, fr) ) cols.add(i); _selectedCols = cols; return super.queryElement(); } // "values" to send back and for in URLs. Use numbers for density (shorter URLs). @Override protected final String[] selectValues() { String [] res = new String[_selectedCols.size()]; int idx = 0; for(int i : _selectedCols) res[idx++] = String.valueOf(i); return res; } // "names" to select in the boxes. 
@Override protected String[] selectNames() { Frame fr = fr(); String [] res = new String[_selectedCols.size()]; int idx = 0; for(int i:_selectedCols) { final Vec v = fr.vec(i); int naRatio = (int)(((double)v.naCnt())/v.length()*100); res[idx++] = fr._names[i] + (naRatio > 0?"(" + naRatio + "% NAs)":""); } return res; } @Override protected boolean isSelected(String value) { int[] val = value(); return val != null && Ints.contains(val, frameColumnNameToIndex(fr(), value, _namesOnly)); } @Override protected int[] parse(String input) throws IllegalArgumentException { Frame fr = fr(); ArrayList<Integer> al = new ArrayList(); for (String col : input.split(",")) { col = col.trim(); int idx = frameColumnNameToIndex(fr(), col, _namesOnly); if (0 > idx || idx > fr.numCols()) throw new H2OIllegalArgumentException(this, "Column "+col+" not part of key "+_key.value()); if (al.contains(idx)) throw new H2OIllegalArgumentException(this, "Column "+col+" is specified twice."); checkLegality(fr.vecs()[idx]); al.add(idx); } return Ints.toArray(al); } @Override protected int[] defaultValue() { final Vec [] vecs = fr().vecs(); int [] res = new int[vecs.length]; int j = 0; for( int i = 0; i < vecs.length; ++i) { if(!(vecs[i].min() < vecs[i].max()) || (_filterNAs && ((double)vecs[i].naCnt())/vecs[i].length() > 0.1) || vecs[i].isUUID() ) // No math on strings or UUIDs res[j++] = i; // ignore constant columns and columns with too many NAs } return Arrays.copyOf(res, j); } @Override protected String queryDescription() { return _description; } } public static class H2OIllegalArgumentException extends IllegalArgumentException { public H2OIllegalArgumentException(Argument a, String msg) { super("Field '" + (a!=null ? a.getName() : "<unknown>") + "' : " + msg); } } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/RequestBuilders.java
package water.api; import dontweave.gson.*; import water.AutoBuffer; import water.H2O; import water.Iced; import water.PrettyPrint; import water.api.Request.API; import water.api.RequestBuilders.Response.Status; import water.util.JsonUtil; import water.util.Log; import water.util.RString; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; import java.text.DecimalFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; /** Builders and response object. * * It just has a stuff of simple builders that walk through the JSON response * and format the stuff into basic html. Understands simplest form of tables, * objects and elements. * * Also defines the response object that contains the response JSON, response * state, other response related automatic variables (timing, etc) and the * custom builders. * * TODO work in progress. * * @author peta */ public class RequestBuilders extends RequestQueries { public static final String ROOT_OBJECT = ""; public static final Gson GSON_BUILDER = new GsonBuilder().setPrettyPrinting().create(); private static final ThreadLocal<DecimalFormat> _format = new ThreadLocal<DecimalFormat>() { @Override protected DecimalFormat initialValue() { return new DecimalFormat("###.####"); } }; static final ThreadLocal<SimpleDateFormat> ISO8601 = new ThreadLocal<SimpleDateFormat>() { @Override protected SimpleDateFormat initialValue() { return new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ"); } }; /** Builds the HTML for the given response. * * This is the root of the HTML. Should display all what is needed, including * the status, timing, etc. Then call the recursive builders for the * response's JSON. 
*/ protected String build(Response response) { StringBuilder sb = new StringBuilder(); sb.append("<div class='container'>"); sb.append("<div class='row-fluid'>"); sb.append("<div class='span12'>"); sb.append(buildJSONResponseBox(response)); if( response._status == Response.Status.done ) response.toJava(sb); sb.append(buildResponseHeader(response)); Builder builder = response.getBuilderFor(ROOT_OBJECT); if (builder == null) { sb.append("<h3>"+name()+"</h3>"); builder = OBJECT_BUILDER; } for( String h : response.getHeaders() ) sb.append(h); if( response._response==null ) { boolean done = response._req.toHTML(sb); if(!done) { JsonParser parser = new JsonParser(); String json = new String(response._req.writeJSON(new AutoBuffer()).buf()); JsonObject o = (JsonObject) parser.parse(json); sb.append(builder.build(response, o, "")); } } else sb.append(builder.build(response,response._response,"")); sb.append("</div></div></div>"); return sb.toString(); } protected String name() { return getClass().getSimpleName(); } private static final String _responseHeader = "<table class='table table-bordered'><tr><td min-width: 60px><table style='font-size:12px;margin:0px;' class='table-borderless'>" + " <tr>" + " <td style='border:0px; min-width: 60px;' rowspan='2' style='vertical-align:top;'>%BUTTON&nbsp;&nbsp;</td>" + " <td style='border:0px; min-width: 60px' colspan='6'>" + " %TEXT" + " </td>" + " </tr>" + " <tr>" + " <td style='border:0px; min-width: 60px'><b>Cloud:</b></td>" + " <td style='padding-right:70px;border:0px; min-width: 60px'>%CLOUD_NAME</td>" + " <td style='border:0px; min-width: 60px'><b>Node:</b></td>" + " <td style='padding-right:70px;border:0px; min-width: 60px'>%NODE_NAME</td>" + " <td style='border:0px; min-width: 60px'><b>Time:</b></td>" + " <td style='padding-right:70px;border:0px; min-width: 60px'>%TIME</td>" + " </tr>" + "</table></td></tr></table>" + "<script type='text/javascript'>" + "%JSSTUFF" + "</script>" ; private static final String _redirectJs = "var 
timer = setTimeout('redirect()',000);\n" + "function countdown_stop() {\n" + " clearTimeout(timer);\n" + "}\n" + "function redirect() {\n" + " window.location.replace('%REDIRECT_URL');\n" + "}\n" ; private static final String _pollJs = "var timer = setTimeout(redirect,%TIMEOUT);\n" + "function countdown_stop() {\n" + " clearTimeout(timer);\n" + "}\n" + "function redirect() {\n" + " document.location.reload(true);\n" + "}\n" ; private static final String _jsonResponseBox = "<div class='pull-right'><a href='#' onclick='$(\"#json_box\").toggleClass(\"hide\");' class='btn btn-inverse btn-mini'>JSON</a></div>" + "<div class='hide' id='json_box'><pre><code class=\"language-json\">" + "%JSON_RESPONSE_BOX" + "</code></pre></div>"; protected String buildJSONResponseBox(Response response) { switch (response._status) { case done : RString result = new RString(_jsonResponseBox); result.replace("JSON_RESPONSE_BOX", response._response == null ? new String(response._req.writeJSON(new AutoBuffer()).buf()) : GSON_BUILDER.toJson(response.toJson())); return result.toString(); case error : case redirect: case poll : default : return ""; } } protected String buildResponseHeader(Response response) { RString result = new RString(_responseHeader); JsonObject obj = response.responseToJson(); result.replace("CLOUD_NAME",obj.get(JSON_H2O).getAsString()); result.replace("NODE_NAME",obj.get(NODE).getAsString()); result.replace("TIME", PrettyPrint.msecs(obj.get(REQUEST_TIME).getAsLong(), true)); switch (response._status) { case error: result.replace("BUTTON","<button class='btn btn-danger disabled'>"+response._status.toString()+"</button>"); result.replace("TEXT","An error has occurred during the creation of the response. Details follow:"); break; case done: //result.replace("BUTTON","<button class='btn btn-success disabled'>"+response._status.toString()+"</button>"); //result.replace("TEXT","The result was a success and no further action is needed. 
JSON results are prettyprinted below."); result = new RString(""); break; case redirect: result.replace("BUTTON","<button class='btn btn-primary' onclick='redirect()'>"+response._status.toString()+"</button>"); result.replace("TEXT","Request was successful and the process was started. You will be redirected to the new page in 1 seconds, or when you click on the redirect" + " button on the left. If you want to keep this page for longer you can <a href='#' onclick='countdown_stop()'>stop the countdown</a>."); RString redirect = new RString(_redirectJs); redirect.replace("REDIRECT_URL",response._redirectName+".html"+encodeRedirectArgs(response._redirectArgs,response._redirArgs)); result.replace("JSSTUFF", redirect.toString()); break; case poll: if (response._redirectArgs != null) { RString poll = new RString(_redirectJs); poll.replace("REDIRECT_URL",requestName()+".html"+encodeRedirectArgs(response._redirectArgs,response._redirArgs)); result.replace("JSSTUFF", poll.toString()); } else { RString poll = new RString(_pollJs); poll.replace("TIMEOUT", response._pollProgress==0 ? 4500 : 5000); result.replace("JSSTUFF", poll.toString()); } int pct = (int) ((double)response._pollProgress / response._pollProgressElements * 100); result.replace("BUTTON","<button class='btn btn-primary' onclick='redirect()'>"+response._status.toString()+"</button>"); result.replace("TEXT","<div style='margin-bottom:0px;padding-bottom:0xp;height:5px;' class='progress progress-stripped'><div class='bar' style='width:"+pct+"%;'></div></div>" + "Request was successful, but the process has not yet finished. The page will refresh every 5 seconds, or you can click the button" + " on the left. 
If you want you can <a href='#' onclick='countdown_stop()'>disable the automatic refresh</a>."); break; default: result.replace("BUTTON","<button class='btn btn-inverse disabled'>"+response._status.toString()+"</button>"); result.replace("TEXT","This is an unknown response state not recognized by the automatic formatter. The rest of the response is displayed below."); break; } return result.toString(); } /** Basic builder for objects. () */ public static final Builder OBJECT_BUILDER = new ObjectBuilder(); /** Basic builder for arrays. (table) */ public static final Builder ARRAY_BUILDER = new ArrayBuilder(); /** Basic builder for array rows. (tr) */ public static final Builder ARRAY_ROW_BUILDER = new ArrayRowBuilder(); /** Basic build for shaded array rows. (tr class='..') */ public static final Builder ARRAY_HEADER_ROW_BUILDER = new ArrayHeaderRowBuilder(); /** Basic builder for elements inside objects. (dl,dt,dd) */ public static final ElementBuilder ELEMENT_BUILDER = new ElementBuilder(); /** Basic builder for elements in array row objects. (td) */ public static final Builder ARRAY_ROW_ELEMENT_BUILDER = new ArrayRowElementBuilder(); /** Basic builder for elements in array rows single col. (tr and td) */ public static final Builder ARRAY_ROW_SINGLECOL_BUILDER = new ArrayRowSingleColBuilder(); // =========================================================================== // Response // =========================================================================== /** This is a response class for the JSON. * * Instead of simply returning a JsonObject, each request returns a new * response object that it must create. This is (a) cleaner (b) more * explicit and (c) allows to specify response states used for proper * error reporting, stateless and statefull processed and so on, and (d) * allows specification of HTML builder hooks in a nice clean interface. 
   *
   * The work pattern should be that in the serve() method, a JsonObject is
   * created and populated with the variables. Then if any error occurs, an
   * error response should be returned.
   *
   * Otherwise a correct state response should be created at the end from the
   * json object and returned.
   *
   * JSON response structure:
   *
   * response : status = (done,error,redirect, ...)
   * h2o = name of the cloud
   * node = answering node
   * time = time in MS it took to process the request serve()
   * other fields as per the response type
   * other fields that should go to the user
   * if error:
   * error : error reported
   */
  public static final class Response {
    /** Status of the response.
     *
     * Defines the state of the response so that it can be nicely reported to
     * the user in either in JSON or in HTML in a meaningful manner. */
    public static enum Status {
      done, ///< Indicates that the request has completed and no further action from the user is required
      poll, ///< Indicates that the same request should be repeated to see some progress
      redirect, ///< Indicates that the request was successful, but new request must be filled to obtain results
      error ///< The request was an error.
    }
    /** Time it took the request to finish. In ms. */
    protected long _time;
    /** Status of the request. */
    protected final Status _status;
    /** Name of the redirected request. This is only valid if the response is
     * redirect status. */
    protected final String _redirectName;
    /** Arguments of the redirect object. These will be given to the redirect
     * object when called. */
    protected final JsonObject _redirectArgs;
    // v2-style redirect/poll arguments (name/value pairs); mutually exclusive with _redirectArgs
    protected final Object[] _redirArgs;
    /** Poll progress in terms of finished elements. */
    protected final int _pollProgress;
    /** Total elements to be finished before the poll will be done. */
    protected final int _pollProgressElements;
    /** Response object for JSON requests. */
    protected final JsonObject _response;
    // non-null for v2 (Request2) responses, which render themselves
    public final Request _req;
    // NOTE(review): defaults to true, which makes escapeIllegalJsonElements() below a
    // no-op — confirm whether the default was meant to be false.
    protected boolean _strictJsonCompliance = true;
    /** Custom builders for JSON elements when converting to HTML automatically. */
    protected final HashMap<String,Builder> _builders = new HashMap();
    /** Custom headers to show in the html. */
    protected final List<String> _headers = new ArrayList();
    /** Private constructor creating the request with given type and response
     * JSON object.
     *
     * Use the static methods to construct the response objects. (looks better
     * when we have a lot of them). */
    private Response(Status status, JsonObject response) { _status = status; _response = response; _redirectName = null; _redirectArgs = null; _redirArgs = null; _pollProgress = -1; _pollProgressElements = -1; _req = null; }
    // redirect (v1): carries the target request name and its JSON arguments
    private Response(Status status, JsonObject response, String redirectName, JsonObject redirectArgs) { assert (status == Status.redirect); _status = status; _response = response; _redirectName = redirectName; _redirectArgs = redirectArgs; _redirArgs = null; _pollProgress = -1; _pollProgressElements = -1; _req = null; }
    // poll (v1): progress/total plus optional JSON args for the next poll
    private Response(Status status, JsonObject response, int progress, int total, JsonObject pollArgs) { assert (status == Status.poll); _status = status; _response = response; _redirectName = null; _redirectArgs = pollArgs; _redirArgs = null; _pollProgress = progress; _pollProgressElements = total; _req = null; }
    // poll (v2): the request renders itself; args given as name/value varargs
    private Response(Status status, Request req, int progress, int total, Object...pollArgs) { assert (status == Status.poll); _status = status; _response = null; _redirectName = null; _redirectArgs = null; _redirArgs = pollArgs; _pollProgress = progress; _pollProgressElements = total; _req = req; }
    /** Response v2 constructor */
    private Response(Status status, Request req, int progress, int total, String redirTo, Object... args) { _status = status; _response = null; _redirectName = redirTo; _redirectArgs = null; _redirArgs = args; _pollProgress = progress; _pollProgressElements = total; _req = req; }
    /** Returns new error response with given error message. */
    public static Response error(Throwable e) { if( !(e instanceof IllegalAccessException )) Log.err(e); String message = e.getMessage(); if( message == null ) message = e.getClass().toString(); return error(message); }
    // Error message is rendered pre-formatted (PreFormattedBuilder) in the HTML view.
    public static Response error(String message) { if( message == null ) message = "no error message"; JsonObject obj = new JsonObject(); obj.addProperty(ERROR,message); Response r = new Response(Status.error,obj); r.setBuilder(ERROR, new PreFormattedBuilder()); return r; }
    /** Returns new done response with given JSON response object. */
    public static Response done(JsonObject response) { assert response != null : "Called Response.done with null JSON response - perhaps you should call Response.doneEmpty"; return new Response(Status.done, response); }
    /** Response done v2. */
    public static Response done(Request req) { return new Response(Response.Status.done,req,-1,-1,(String) null); }
    /** A unique empty response which carries an empty JSON object */
    public static final Response EMPTY_RESPONSE = Response.done(new JsonObject());
    /** Returns new done empty done response.
     * Should be called only in cases which does not need json response.
     * see HTMLOnlyRequest */
    public static Response doneEmpty() { return EMPTY_RESPONSE; }
    /** Creates the new response with status redirect. This response will be
     * redirected to another request specified by redirectRequest with the
     * redirection arguments provided in redirectArgs. */
    public static Response redirect(JsonObject response, Class<? extends Request> req, JsonObject args) { return new Response(Status.redirect, response, req.getSimpleName(), args); }
    /** Redirect for v2 API */
    public static Response redirect(Request req, String redirectName, Object...redirectArgs) { return new Response(Response.Status.redirect, req, -1, -1, redirectName, redirectArgs); }
    /** Returns the poll response object. */
    public static Response poll(JsonObject response, int progress, int total) { return new Response(Status.poll,response, progress, total, null); }
    /** Returns the poll response object initialized by percents completed. */
    public static Response poll(JsonObject response, float progress) { int p = (int) (progress * 100); return Response.poll(response, p, 100); }
    /** returns the poll response object with different arguments that was
     * this call. */
    public static Response poll(JsonObject response, int progress, int total, JsonObject pollArgs) { return new Response(Status.poll,response, progress, total, pollArgs); }
    /** Returns the poll response object. */
    public static Response poll(Request req, int progress, int total, Object...pollArgs) { return new Response(Status.poll,req, progress, total, pollArgs); }
    /** Sets the time of the response as a difference between the given time and
     * now. Called automatically by serving request. Only available in JSON and
     * HTML. */
    public final void setTimeStart(long timeStart) { _time = System.currentTimeMillis() - timeStart; }
    /** Associates a given builder with the specified JSON context. JSON context
     * is a dot separated path to the JSON object/element starting from root.
     *
     * One exception is an array row element, which does not really have a
     * distinct name in JSON and is thus identified as the context name of the
     * array + "_ROW" appended to it.
     *
     * The builder object will then be called to build the HTML for the
     * particular JSON element. By wise subclassing of the preexisting builders
     * and changing their behavior an arbitrarily complex webpage can be
     * created. */
    public Response setBuilder(String contextName, Builder builder) { _builders.put(contextName, builder); return this; }
    /** Returns the builder for given JSON context element. Null if not found
     * in which case a default builder object will be used. These default
     * builders are specified by the builders themselves. */
    protected Builder getBuilderFor(String contextName) { return _builders.get(contextName); }
    public void addHeader(String h) { _headers.add(h); }
    public List<String> getHeaders() { return _headers; }
    /** Returns the response system json. That is the response type, time,
     * h2o basics and other automatic stuff.
     * @return */
    protected JsonObject responseToJson() { JsonObject resp = new JsonObject(); resp.addProperty(STATUS,_status.toString()); resp.addProperty(JSON_H2O, H2O.NAME); resp.addProperty(NODE, H2O.SELF.toString()); resp.addProperty(REQUEST_TIME, _time); switch (_status) { case done: case error: break; case redirect: resp.addProperty(REDIRECT,_redirectName); if (_redirectArgs != null) resp.add(REDIRECT_ARGS,_redirectArgs); break; case poll: resp.addProperty(PROGRESS, _pollProgress); resp.addProperty(PROGRESS_TOTAL, _pollProgressElements); break; default: assert(false): "Unknown response type "+_status.toString(); } return resp; }
    /** Returns the JSONified version of the request. At the moment just
     * returns the response. */
    public JsonObject toJson() { JsonObject res = _response; if( _strictJsonCompliance ) res = JsonUtil.escape(res); // in this case, creating a cyclical structure would kill us.
      if( _response != null && _response == _redirectArgs ) { res = new JsonObject(); for( Entry<String, JsonElement> e : _response.entrySet() ) { res.add(e.getKey(), e.getValue()); } } res.add(RESPONSE, responseToJson()); return res; }
    // Converts the JSON response to XML via org.json's JSONObject/XML bridge.
    public String toXml() { JsonObject jo = this.toJson(); String jsonString = jo.toString(); org.json.JSONObject jo2 = new org.json.JSONObject(jsonString); String xmlString = org.json.XML.toString(jo2); return xmlString; }
    // Delegates Java-code generation to the v2 request, if any.
    public void toJava(StringBuilder sb) { if( _req != null ) _req.toJava(sb); }
    /** Returns the error of the request object if any. Returns null if the
     * response is not in error state. */
    public String error() { if (_status != Status.error) return null; return _response.get(ERROR).getAsString(); }
    public void escapeIllegalJsonElements() { _strictJsonCompliance = true; }
    // Extracts redirect URL (for redirect/poll states) + timing/status for Request2 responses.
    public ResponseInfo extractInfo() { String redirectUrl = null; if (_status == Status.redirect) redirectUrl = _redirectName+".json"+encodeRedirectArgs(_redirectArgs,_redirArgs); if (_status == Status.poll) redirectUrl = _req.href()+".json"+encodeRedirectArgs(_redirectArgs,_redirArgs); return new ResponseInfo(redirectUrl, _time, _status); }
  }
  /** Class holding technical information about request/response. It will be served as a part of Request2's
   * response.
*/ public static class ResponseInfo extends Iced { static final int API_WEAVER=1; static public DocGen.FieldDoc[] DOC_FIELDS; final @API(help="H2O cloud name.") String h2o; final @API(help="Node serving the response.") String node; final @API(help="Request processing time.") long time; final @API(help="Response status") Response.Status status; final @API(help="Redirect name.") String redirect_url; public ResponseInfo(String redirect_url, long time, Status status) { this.h2o = H2O.NAME; this.node = H2O.SELF.toString(); this.redirect_url = redirect_url; this.time = time; this.status = status; } } // --------------------------------------------------------------------------- // Builder // --------------------------------------------------------------------------- /** An abstract class to build the HTML page automatically from JSON. * * The idea is that every JSON element in the response structure (dot * separated) may be a unique context that might be displayed in a different * way. By creating specialized builders and assigning them to the JSON * element contexts you can build arbitrarily complex HTML page. * * The basic builders for elements, arrays, array rows and elements inside * array rows are provided by default. * * Each builder can also specify default builders for its components to make * sure for instance that tables in arrays do not recurse and so on. */ public static abstract class Builder { /** Override this method to provide HTML for the given json element. * * The arguments are the response object, the element whose HTML should be * produced and the contextName of the element. */ public abstract String build(Response response, JsonElement element, String contextName); /** Adds the given element name to the existing context. Dot concatenates * the names. */ public static String addToContext(String oldContext, String name) { if (oldContext.isEmpty()) return name; return oldContext+"."+name; } /** For a given context returns the element name. 
That is the last word * after a dot, or the full string if dot is not present. */ public static String elementName(String context) { int idx = context.lastIndexOf("."); return context.substring(idx+1); } /** Returns the default builders. * * These are element builder, object builder and array builder. */ public Builder defaultBuilder(JsonElement element) { if (element instanceof JsonArray) return ARRAY_BUILDER; else if (element instanceof JsonObject) return OBJECT_BUILDER; else return ELEMENT_BUILDER; } } // --------------------------------------------------------------------------- // ObjectBuilder // --------------------------------------------------------------------------- /** Object builder. * * By default objects are displayed as a horizontal dl elements with their * heading preceding any of the values. Methods for caption, header, * footer as well as element building are provided so that the behavior can * easily be customized. */ public static class ObjectBuilder extends Builder { /** Displays the caption of the object. */ public String caption(JsonObject object, String objectName) { return objectName.isEmpty() ? "" : "<h4>"+objectName+"</h4>"; } /** Returns the header of the object. * * That is any HTML displayed after caption and before any object's * contents. */ public String header(JsonObject object, String objectName) { return ""; } /** Returns the footer of the object. * * That is any HTML displayed after any object's contents. */ public String footer(JsonObject object, String objectName) { return ""; } /** Creates the HTML of the object. * * That is the caption, header, all its contents in order they were * added and then the footer. There should be no need to overload this * function, rather override the provided hooks above. 
*/ public String build(Response response, JsonObject object, String contextName) { StringBuilder sb = new StringBuilder(); String name = elementName(contextName); sb.append(caption(object, name)); sb.append(header(object, name)); for (Map.Entry<String,JsonElement> entry : object.entrySet()) { JsonElement e = entry.getValue(); String elementContext = addToContext(contextName, entry.getKey()); Builder builder = response.getBuilderFor(elementContext); if (builder == null) builder = defaultBuilder(e); sb.append(builder.build(response, e, elementContext)); } sb.append(footer(object, elementName(contextName))); return sb.toString(); } /** The original build method. Calls build with json object, if not an * object, displays an alert box with the JSON contents. */ public String build(Response response, JsonElement element, String contextName) { if (element instanceof JsonObject) return build(response, (JsonObject) element, contextName); return "<div class='alert alert-error'>Response element "+contextName+" expected to be JsonObject. Automatic display not available</div><pre>"+element.toString()+"</pre>"; } } public static class NoCaptionObjectBuilder extends ObjectBuilder { public String caption(JsonObject object, String objectName) { return ""; } } // --------------------------------------------------------------------------- // Array builder // --------------------------------------------------------------------------- /** Builds the HTML for an array. Arrays generally go to a table. Is similar * to the object, but rather than a horizontal dl generally displays as * a table. * * Can produce a header of the table and has hooks for rows. */ public static class ArrayBuilder extends Builder { /** Caption of the table. */ public String caption(JsonArray array, String name) { return "<h4>"+name+"</h4>"; } /** Header of the table. Produces header off the first element if it is * object, or a single column header named value if it is a primitive. Also * includes the table tag. 
*/ public String header(JsonArray array) { StringBuilder sb = new StringBuilder(); sb.append("<span style='display: inline-block;'>"); sb.append("<table class='table table-striped table-bordered'>"); if (array.get(0) instanceof JsonObject) { sb.append("<tr>"); for (Map.Entry<String,JsonElement> entry : ((JsonObject)array.get(0)).entrySet()) sb.append("<th style='min-width: 60px;'>").append(header(entry.getKey())).append("</th>"); sb.append("</tr>"); } return sb.toString(); } public String header(String key) { return JSON2HTML(key); } /** Footer of the table, the end of table tag. */ public String footer(JsonArray array) { return "</table></span>"; } /** Default builders for the table. It is either a table row builder if the * row is an object, or a row single column builder if it is a primitive * or another array. */ @Override public Builder defaultBuilder(JsonElement element) { return element instanceof JsonObject ? ARRAY_ROW_BUILDER : ARRAY_ROW_SINGLECOL_BUILDER; } /** Builds the array. Creates the caption, header, all the rows and the * footer or determines that the array is empty. */ public String build(Response response, JsonArray array, String contextName) { StringBuilder sb = new StringBuilder(); sb.append(caption(array, elementName(contextName))); if (array.size() == 0) { sb.append("<div class='alert alert-info'>empty array</div>"); } else { sb.append(header(array)); for (JsonElement e : array) { Builder builder = response.getBuilderFor(contextName+"_ROW"); if (builder == null) builder = defaultBuilder(e); sb.append(builder.build(response, e, contextName)); } sb.append(footer(array)); } return sb.toString(); } /** Calls the build method with array. If not an array, displays an alert * with the JSON contents of the element. 
     */
    public String build(Response response, JsonElement element, String contextName) { if (element instanceof JsonArray) return build(response, (JsonArray)element, contextName); return "<div class='alert alert-error'>Response element "+contextName+" expected to be JsonArray. Automatic display not available</div><pre>"+element.toString()+"</pre>"; }
  }
  // ---------------------------------------------------------------------------
  // ElementBuilder
  // ---------------------------------------------------------------------------
  /** A basic element builder.
   *
   * Elements are displayed as their string values, everything else as their
   * JSON values. */
  public static class ElementBuilder extends Builder {
    /** Displays the element in the horizontal dl layout. Override this method
     * to change the layout. */
    public String build(String elementContents, String elementName) { return "<dl class='dl-horizontal'><dt>"+elementName+"</dt><dd>"+elementContents+"</dd></dl>"; }
    public String arrayToString(JsonArray array, String contextName) { return array.toString(); }
    public String objectToString(JsonObject obj, String contextName) { return obj.toString(); }
    // Pretty-prints by element-name suffix (bytes/sec, bytes, millis) or by primitive type.
    public String elementToString(JsonElement elm, String contextName) { String elementName = elementName(contextName); if( elementName.endsWith(Suffixes.BYTES_PER_SECOND) ) { return PrettyPrint.bytesPerSecond(elm.getAsLong()); } else if( elementName.endsWith(Suffixes.BYTES) ) { return PrettyPrint.bytes(elm.getAsLong()); } else if( elementName.endsWith(Suffixes.MILLIS) ) { return PrettyPrint.msecs(elm.getAsLong(), true); } else if( elm instanceof JsonPrimitive && ((JsonPrimitive)elm).isString() ) { return elm.getAsString(); } else if( elm instanceof JsonPrimitive && ((JsonPrimitive)elm).isNumber() ) { Number n = elm.getAsNumber(); if( n instanceof Double ) { Double d = (Double) n; return format(d); } return elm.getAsString(); } else { return elm.toString(); } }
    // Formats a double with the shared "###.####" pattern; NaN renders as empty string.
    public static String format(double value) { if( Double.isNaN(value) ) return ""; return _format.get().format(value); }
    // Strips a recognized unit suffix off the element name for display.
    public String elementToName(String contextName) { String base = elementName(contextName); for( String s : new String[] { Suffixes.BYTES_PER_SECOND, Suffixes.BYTES, Suffixes.MILLIS, }) { if( base.endsWith(s) ) return base.substring(0, base.length() - s.length()); } return base; }
    /** Based of the element type determines its string value and then calls
     * the string build version. */
    @Override public String build(Response response, JsonElement element, String contextName) { String base; if (element instanceof JsonArray) { base = arrayToString((JsonArray)element, contextName); } else if (element instanceof JsonObject) { base = objectToString((JsonObject)element, contextName); } else { base = elementToString(element, contextName); } return build(base, elementToName(contextName)); }
  }
  /** Element builder that wraps the content into an Inspect.html link keyed by the content. */
  public static class KeyElementBuilder extends ElementBuilder { @Override public String build(String content, String name) { try { String k = URLEncoder.encode(content, "UTF-8"); return super.build("<a href='Inspect.html?key="+k+"'>"+content+"</a>", name); } catch( UnsupportedEncodingException e ) { throw Log.errRTExcept(e); } } }
  /** Element builder rendering the content inside a pre block. */
  public static class PreFormattedBuilder extends ElementBuilder { @Override public String build(String content, String name) { return super.build("<pre>"+content+"</pre>", name); } }
  // Just the Key as a link, without any other cruft
  public static class KeyLinkElementBuilder extends ElementBuilder { @Override public String build(Response response, JsonElement element, String contextName) { try { String key = element.getAsString(); String k = URLEncoder.encode(key, "UTF-8"); return "<a href='Inspect.html?key="+k+"'>"+key+"</a>"; } catch( UnsupportedEncodingException e ) { throw Log.errRTExcept(e); } } }
  /** Renders a boolean element as one of two fixed strings (no dt label). */
  public static class BooleanStringBuilder extends ElementBuilder { final String _t, _f; public BooleanStringBuilder(String t, String f) { _t=t; _f=f; } @Override public String build(Response response, JsonElement element, String contextName) { boolean b = element.getAsBoolean(); return "<dl class='dl-horizontal'><dt></dt><dd>"+(b?_t:_f)+"</dd></dl>"; } }
  /** Suppresses an element from the HTML output entirely. */
  public static class HideBuilder extends ElementBuilder { @Override public String build(Response response, JsonElement element, String contextName) { return ""; } }
  // ---------------------------------------------------------------------------
  // ArrayRowBuilder
  // ---------------------------------------------------------------------------
  /** A row in the array table.
   *
   * Is an object builder with no caption and header and footer being the
   * table row tags. Default builder is array row element (td). */
  public static class ArrayRowBuilder extends ObjectBuilder {
    @Override public String caption(JsonObject object, String objectName) { return ""; }
    @Override public String header(JsonObject object, String objectName) {
      //Gson g = new Gson();
      //Log.info(g.toJson(object))
      return "<tr id='row_"+object.get("row")+"'>"; }
    @Override public String footer(JsonObject object, String objectName) { return "</tr>"; }
    @Override public Builder defaultBuilder(JsonElement element) { return ARRAY_ROW_ELEMENT_BUILDER; }
  }
  /** Shaded (warning-class) header row variant. */
  public static class ArrayHeaderRowBuilder extends ArrayRowBuilder { @Override public String header(JsonObject object, String objectName) { return "<tr class='warning'>"; } }
  // ---------------------------------------------------------------------------
  // ArrayRowElementBuilder
  // ---------------------------------------------------------------------------
  /** Default array row element.
   *
   * A simple element builder that encapsulates into a td.
*/ public static class ArrayRowElementBuilder extends ElementBuilder { public String build(String elementContents, String elementName) { return "<td style='min-width: 60px;'>"+elementContents+"</td>"; } } // --------------------------------------------------------------------------- // ArrayRowSingleColBuilder // --------------------------------------------------------------------------- /** Array row for primitives. * * A row with single td element. */ public static class ArrayRowSingleColBuilder extends ElementBuilder { public String build(String elementContents, String elementName) { return "<tr style='min-width: 60px;'><td style='min-width: 60px;'>"+elementContents+"</td></tr>"; } } // --------------------------------------------------------------------------- // PaginatedTable // --------------------------------------------------------------------------- /** A table with pagination controls. * * Use this builder when large data is returned not at once. */ public static class PaginatedTable extends ArrayBuilder { protected final String _offsetJSON; protected final String _viewJSON; protected final JsonObject _query; protected final long _max; protected final boolean _allowInfo; protected final long _offset; protected final int _view; public PaginatedTable(JsonObject query, long offset, int view, long max, boolean allowInfo, String offsetJSON, String viewJSON) { _offsetJSON = offsetJSON; _viewJSON = viewJSON; _query = query; _max = max; _allowInfo = allowInfo; _offset = offset; _view = view; } public PaginatedTable(JsonObject query, long offset, int view, long max, boolean allowInfo) { this(query, offset, view, max, allowInfo, OFFSET, VIEW); } protected String link(String caption, long offset, int view, boolean disabled) { _query.addProperty(_offsetJSON, offset); _query.addProperty(_viewJSON, view); if (disabled) return "<li class='disabled'><a>"+caption+"</a></li>"; else return "<li><a 
href='"+RequestStatics.encodeRedirectArgs(_query,null)+"'>"+caption+"</a></li>"; } protected String infoButton() { if (!_allowInfo) return ""; return "<span class='pagination'><ul>"+link("info",-1,_view,_offset==1)+"</ul></span>&nbsp;&nbsp;"; } protected String pagination() { StringBuilder sb = new StringBuilder(); sb.append("<div style='text-align:center;'>"); sb.append(infoButton()); long firstPageItems = _offset % _view; long lastPageItems = (_max-_offset) % _view; long prevPages = _offset / _view + (firstPageItems>0?1:0); long nextPages = _offset + _view >= _max ? 0 : Math.max(_max-_offset-_view, 0) / _view + (lastPageItems>0?1:0); long lastOffset = _offset + nextPages * _view; long currentIdx = prevPages; long lastIdx = currentIdx+nextPages; long startIdx = Math.max(currentIdx-5,0); long endIdx = Math.min(startIdx + 11, lastIdx); if (_offset == -1) currentIdx = -1; sb.append("<span class='pagination'><ul>"); sb.append(link("|&lt;",0,_view, _offset == 0)); sb.append(link("&lt;",Math.max(_offset-_view,0),_view, currentIdx==0)); if (startIdx>0) sb.append(link("...",0,0,true)); for (long i = startIdx; i <= endIdx; ++i) sb.append(link(String.valueOf(i),_view*i,_view,i == currentIdx)); if (endIdx<lastIdx) sb.append(link("...",0,0,true)); sb.append(link("&gt;",_offset+_view,_view, currentIdx == lastIdx)); sb.append(link("&gt;|",lastOffset,_view, currentIdx == lastIdx)); sb.append("</ul></span>"); sb.append("</div>"); return sb.toString(); } @Override public String header(JsonArray array) { StringBuilder sb = new StringBuilder(); sb.append(pagination()); sb.append(super.header(array)); return sb.toString(); } @Override public String footer(JsonArray array) { StringBuilder sb = new StringBuilder(); sb.append(super.footer(array)); sb.append(pagination()); return sb.toString(); } } public class WarningCellBuilder extends ArrayRowElementBuilder { @Override public String arrayToString(JsonArray arr, String contextName) { StringBuilder sb = new StringBuilder(); String sep = 
""; for( JsonElement e : arr ) { sb.append(sep).append(e.getAsString()); sep = "</br>"; } return sb.toString(); } } public class KeyCellBuilder extends ArrayRowElementBuilder { @Override public String elementToString(JsonElement element, String contextName) { String str = element.getAsString(); try { String key = URLEncoder.encode(str,"UTF-8"); String delete = "<a href='RemoveAck.html?"+KEY+"="+key+"'><button class='btn btn-danger btn-mini'>X</button></a>"; return delete + "&nbsp;&nbsp;" + Inspector.link(str, str); } catch( UnsupportedEncodingException e ) { throw Log.errRTExcept(e); } } } public class KeyMinAvgMaxBuilder extends ArrayRowElementBuilder { private String trunc(JsonObject obj, String fld, int n) { JsonElement je = obj.get(fld); if( je == null || je instanceof JsonNull ) return "<br>"; String s1 = je.getAsString(); String s2 = (s1.length() > n ? s1.substring(0,n) : s1); String s3 = s2.replace(" ","&nbsp;"); return s3+"<br>"; } @Override public String objectToString(JsonObject obj, String contextName) { if (!obj.has(MIN)) return ""; return "<strong>"+trunc(obj,HEADER,10)+"</strong>"+trunc(obj,MIN,6)+trunc(obj,MEAN,6)+trunc(obj,MAX,6); } } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/RequestQueries.java
package water.api;

import hex.GridSearch;

import java.util.*;

import water.H2O;
import water.util.RString;

/**
 * Argument checking and HTML query-form generation shared by all requests.
 *
 * @author peta
 */
public class RequestQueries extends RequestArguments {

  /** Overwrite this method to be able to change / disable values of other
   * arguments on certain argument changes.
   *
   * This is done for both query checking and request checking.
   */
  protected void queryArgumentValueSet(Argument arg, Properties inputArgs) {
  }

  /** Checks the given arguments.
   *
   * When first argument is found wrong, generates the json error and returns the
   * result to be returned if any problems were found. Otherwise returns
   *
   * @param args user-supplied argument name/value pairs
   * @param type request flavor (query, json, www, ...)
   * @return error page / query page to send back, or null when all arguments check out
   */
  protected final String checkArguments(Properties args, RequestType type) {
    // Why the following lines duplicate lines from Request#92 - handling query?
    // reset all arguments
    for (Argument arg: _arguments)
      arg.reset();
    // return query if in query mode
    if (type == RequestType.query)
      return buildQuery(args,type);
    /*
    // Check that for each actual input argument from the user, there is some
    // request argument that this method is expecting.
    //*/
    if (H2O.OPT_ARGS.check_rest_params && !(this instanceof GridSearch) && !(this instanceof HTTP500)) {
      // Properties.propertyNames() is declared as Enumeration<?>; keep the
      // wildcard type instead of the raw Enumeration the original used.
      Enumeration<?> en = args.propertyNames();
      while (en.hasMoreElements()) {
        boolean found = false;
        String key = (String) en.nextElement();
        for (Argument arg: _arguments) {
          if (arg._name.equals(key)) {
            found = true;
            break;
          }
        }
        if (!found) {
          return jsonError("Request specifies the argument '"+key+"' but it is not a valid parameter for this query " + this.getClass().getName()).toString();
        }
      }
    }
    // check the arguments now
    for (Argument arg: _arguments) {
      if (!arg.disabled()) {
        try {
          arg.check(RequestQueries.this, args.getProperty(arg._name,""));
          queryArgumentValueSet(arg, args);
        } catch( IllegalArgumentException e ) {
          if (type == RequestType.json)
            return jsonError("Argument '"+arg._name+"' error: "+e.getMessage()).toString();
          else
            return buildQuery(args,type);
        }
      }
    }
    return null; // all arguments valid
  }

  // HTML skeleton of the query page; %REQ_NAME / %QUERY / %SCRIPT are RString
  // placeholders filled in by buildQuery().
  protected static final String _queryHtml =
        "<div class='container'>"
      + "<div class='row-fluid'>"
      + "<div class='span12'>"
      + "<h3>Request %REQ_NAME <a href='%REQ_NAME.help'><i class='icon-question-sign'></i></a></h3>"
      + "<p></p>"
      + " <dl class='dl-horizontal'><dt></dt><dd>"
      + " <button class='btn btn-primary' onclick='query_submit()'>Submit</button>"
      + " <button class='btn btn-info' onclick='query_refresh(event || window.event)'>Refresh</button>"
      + " <button class='btn' onclick='query_reset()'>Reset</button>"
      + " </dd></dl>"
      + " %QUERY"
      + " <dl class='dl-horizontal'><dt></dt><dd>"
      + " <button class='btn btn-primary' onclick='query_submit()'>Submit</button>"
      + " <button class='btn btn-info' onclick='query_refresh(event || window.event)'>Refresh</button>"
      + " <button class='btn' onclick='query_reset()'>Reset</button>"
      + " </dd></dl>"
      + " <script type='text/javascript'>"
      + " %SCRIPT"
      + " </script>"
      + "</div></div></div>"
      ;

  // Client-side JS driving the form: submit/refresh/reset plus a per-argument
  // value getter generated from the %ELEMENT_* RString groups.
  private static final String _queryJs =
        "\nfunction query_refresh(event) {\n"
      + " query_submit('.query', event.data, null);\n"
      + "}\n"
      + "function query_submit(requestType, specArg, specValue) {\n"
      + " if (typeof(requestType) === 'undefined')\n"
      + " requestType='.html';\n"
      + " var request = {};\n"
      + " %REQUEST_ELEMENT{"
      + "%ELEMENT_PREQ request.%ELEMENT_NAME = query_value_%ELEMENT_NAME();\n"
      + " }\n"
      + " var location = '%REQUEST_NAME'+requestType+'?'+$.param(request);\n"
      + " if (requestType == '.query') {\n"
      + " window.location.replace(location);\n"
      + " } else {\n"
      + " window.location = location;\n"
      + " }\n"
      + "}\n"
      + "function query_reset() {\n"
      + " window.location.replace('%REQUEST_NAME.query');\n"
      + "}\n"
      + "%ELEMENT_VALUE{ %BODY\n }"
      + "%ELEMENT_ADDONS{ %BODY\n }"
      + "%ELEMENT_ONCHANGE{ %BODY\n }"
      ;

  /** Returns the request query form produced from the given input arguments. */
  protected String buildQuery(Properties parms, RequestType type) {
    if (parms.isEmpty())
      type = RequestType.query; // empty submission -> treat as a fresh query page
    RString result = new RString(_queryHtml);
    result.replace("REQ_NAME", this.getClass().getSimpleName());
    StringBuilder query = new StringBuilder();
    query.append("<form onsubmit='return false;'>");
    RString script = new RString(_queryJs);
    script.replace("REQUEST_NAME", getClass().getSimpleName());
    for (Argument arg: _arguments) {
      try {
        arg.check(RequestQueries.this, parms.getProperty(arg._name,""));
        queryArgumentValueSet(arg, parms);
      } catch( IllegalArgumentException e ) {
        // in query mode only display error for arguments present
        if ((type != RequestType.query) || !parms.getProperty(arg._name,"").isEmpty())
          query.append("<div class='alert alert-error'>"+e.getMessage()+"</div>");
      }
      if (arg._hideInQuery)
        continue;
      if (!arg.disabled()) {
        RString x = script.restartGroup("REQUEST_ELEMENT");
        x.replace("ELEMENT_NAME",arg._name);
        // If some Argument has prerequisites, and those pre-reqs changed on
        // this very page load then we do not assign the arg here: the values
        // passed will be something valid from the PRIOR page - based on the
        // old pre-req - and won't be correct. Not assigning them here means
        // we'll act "as if" the field was never filled in.
        if( arg._prerequisites != null ) {
          StringBuilder sb = new StringBuilder("if( ");
          ArrayList<RequestArguments.Argument> preqs = arg._prerequisites;
          for( RequestArguments.Argument dep : preqs )
            sb.append("specArg!=='").append(dep._name).append("' && ");
          sb.append("true ) ");
          x.replace("ELEMENT_PREQ",sb);
        }
        x.append();
        x = script.restartGroup("ELEMENT_VALUE");
        x.replace("ELEMENT_NAME",arg._name);
        x.replace("BODY","function query_value_"+arg._name+"() { "+arg.jsValue()+"} ");
        x.append();
      }
      if (arg.refreshOnChange()) {
        RString x = script.restartGroup("ELEMENT_ONCHANGE");
        x.replace("BODY",arg.jsRefresh("query_refresh"));
        x.append();
      }
      RString x = script.restartGroup("ELEMENT_ADDONS");
      x.replace("BODY", arg.jsAddons());
      x.append();
    }
    // Render the visible form controls in a second pass, after all checks ran.
    for (Argument arg: _arguments) {
      if (arg._hideInQuery)
        continue;
      query.append(arg.query());
    }
    query.append("</form>");
    result.replace("QUERY",query.toString());
    result.replace("SCRIPT",script.toString());
    return result.toString();
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/RequestServer.java
package water.api; import com.google.common.io.ByteStreams; import com.google.common.io.Closeables; import hex.*; import hex.GridSearch.GridSearchProgress; import hex.KMeans2.KMeans2ModelView; import hex.KMeans2.KMeans2Progress; import hex.anomaly.Anomaly; import hex.deepfeatures.DeepFeatures; import hex.deeplearning.DeepLearning; import hex.drf.DRF; import hex.gapstat.GapStatistic; import hex.gapstat.GapStatisticModelView; import hex.gbm.GBM; import hex.glm.*; import hex.nb.NBModelView; import hex.nb.NBProgressPage; import hex.gapstat.GapStatisticProgressPage; import hex.nb.NaiveBayes; import hex.pca.PCA; import hex.pca.PCAModelView; import hex.pca.PCAProgressPage; import hex.pca.PCAScore; import hex.singlenoderf.SpeeDRF; import hex.singlenoderf.SpeeDRFModelView; import hex.singlenoderf.SpeeDRFProgressPage; import water.*; import water.api.Upload.PostFile; import water.api.handlers.ModelBuildersMetadataHandlerV1; import water.deploy.LaunchJar; import water.schemas.HTTP404V1; import water.schemas.HTTP500V1; import water.schemas.Schema; import water.util.Log; import water.util.Log.Tag.Sys; import water.util.Utils.ExpectedExceptionForDebug; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.lang.reflect.Method; import java.net.ServerSocket; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Properties; import java.util.concurrent.ConcurrentHashMap; import java.util.regex.Matcher; import java.util.regex.Pattern; /** This is a simple web server. 
*/
public class RequestServer extends NanoHTTPD {
  // Highest REST API version this server understands; /latest maps here.
  private static final int LATEST_VERSION = 2;

  /** REST API versions and their URL prefixes. */
  public enum API_VERSION {
    V_1(1, "/"),
    V_2(2, "/2/"); // FIXME: better should be /v2/
    final private int _version;
    final private String _prefix;
    public final String prefix() { return _prefix; }
    private API_VERSION(int version, String prefix) { _version = version; _prefix = prefix; }
  }
  static RequestServer SERVER;
  // cache of all loaded resources
  // NOTE(review): raw-typed constructor calls (new ConcurrentHashMap(), new HashMap())
  // should be diamond/new ConcurrentHashMap<>() — kept byte-identical here.
  private static final ConcurrentHashMap<String,byte[]> _cache = new ConcurrentHashMap();
  protected static final HashMap<String,Request> _requests = new HashMap();

  // An array of regexs-over-URLs and handling Methods.
  // The list is searched in-order, first match gets dispatched.
  protected static final LinkedHashMap<String,Method> _handlers = new LinkedHashMap<String,Method>();

  static final Request _http404;
  static final Request _http500;

  /** Serves the canned 404 page for an unknown resource. */
  public static final Response response404(NanoHTTPD server, Properties parms) {
    return _http404.serve(server, parms, Request.RequestType.www);
  }
  /** Serves the canned 500 page for an internal error. */
  public static final Response response500(NanoHTTPD server, Properties parms) {
    return _http500.serve(server, parms, Request.RequestType.www);
  }

  // initialization ------------------------------------------------------------
  // Registers every request page once at class-load time; registration order
  // determines navbar ordering, so do not reorder these calls.
  static {
    boolean USE_NEW_TAB = true;

    _http404 = registerRequest(new HTTP404());
    _http500 = registerRequest(new HTTP500());

    // NOTE(review): this URL is written as a regex (".*") but lookup() matches
    // with String.equals (see its TODO), so the wildcard form never matches.
    registerGET("/1/metadata/modelbuilders/.*", ModelBuildersMetadataHandlerV1.class, "show");
    registerGET("/1/metadata/modelbuilders", ModelBuildersMetadataHandlerV1.class, "list");

    // Data
    Request.addToNavbar(registerRequest(new ImportFiles2()),  "Import Files",        "Data");
    Request.addToNavbar(registerRequest(new Upload2()),       "Upload",              "Data");
    Request.addToNavbar(registerRequest(new Parse2()),        "Parse",               "Data");
    Request.addToNavbar(registerRequest(new Inspector()),     "Inspect",             "Data");
    Request.addToNavbar(registerRequest(new SummaryPage2()),  "Summary",             "Data");
    Request.addToNavbar(registerRequest(new QuantilesPage()), "Quantiles",           "Data");
    Request.addToNavbar(registerRequest(new Impute()),        "Impute",              "Data");
    Request.addToNavbar(registerRequest(new Interaction()),   "Interaction",         "Data");
    Request.addToNavbar(registerRequest(new CreateFrame()),   "Create Frame",        "Data");
    Request.addToNavbar(registerRequest(new FrameSplitPage()),"Split Frame",         "Data");
    Request.addToNavbar(registerRequest(new StoreView()),     "View All",            "Data");
    Request.addToNavbar(registerRequest(new ExportFiles()),   "Export Files",        "Data");
    // Register Inspect2 just for viewing frames
    registerRequest(new Inspect2());

    // FVec models
    Request.addToNavbar(registerRequest(new DeepLearning()),"Deep Learning",                "Model");
    Request.addToNavbar(registerRequest(new GLM2()),        "Generalized Linear Model",     "Model");
    Request.addToNavbar(registerRequest(new GBM()),         "Gradient Boosting Machine",    "Model");
    Request.addToNavbar(registerRequest(new KMeans2()),     "K-Means Clustering",           "Model");
    Request.addToNavbar(registerRequest(new PCA()),         "Principal Component Analysis", "Model");
    Request.addToNavbar(registerRequest(new SpeeDRF()),     "Random Forest",                "Model");
    Request.addToNavbar(registerRequest(new DRF()),         "Random Forest - Big Data",     "Model");
    Request.addToNavbar(registerRequest(new Anomaly()),     "Anomaly Detection (Beta)",     "Model");
    Request.addToNavbar(registerRequest(new CoxPH()),       "Cox Proportional Hazards (Beta)", "Model");
    Request.addToNavbar(registerRequest(new DeepFeatures()),"Deep Feature Extractor (Beta)","Model");
    Request.addToNavbar(registerRequest(new NaiveBayes()),  "Naive Bayes Classifier (Beta)","Model");

    // FVec scoring
    Request.addToNavbar(registerRequest(new Predict()),        "Predict",          "Score");
    // only for glm to allow for overriding of lambda_submodel
    registerRequest(new GLMPredict());
    Request.addToNavbar(registerRequest(new ConfusionMatrix()),"Confusion Matrix", "Score");
    Request.addToNavbar(registerRequest(new AUC()),            "AUC",              "Score");
    Request.addToNavbar(registerRequest(new HitRatio()),       "HitRatio",         "Score");
    Request.addToNavbar(registerRequest(new PCAScore()),       "PCAScore",         "Score");
    Request.addToNavbar(registerRequest(new GainsLiftTable()), "Gains/Lift Table", "Score");
    Request.addToNavbar(registerRequest(new Steam()),          "Multi-model Scoring (Beta)","Score");

    // Admin
    Request.addToNavbar(registerRequest(new Jobs()),              "Jobs",                  "Admin");
    Request.addToNavbar(registerRequest(new Cloud()),             "Cluster Status",        "Admin");
    Request.addToNavbar(registerRequest(new WaterMeterPerfbar()), "Water Meter (Perfbar)", "Admin");
    Request.addToNavbar(registerRequest(new LogView()),           "Inspect Log",           "Admin");
    Request.addToNavbar(registerRequest(new JProfile()),          "Profiler",              "Admin");
    Request.addToNavbar(registerRequest(new JStack()),            "Stack Dump",            "Admin");
    Request.addToNavbar(registerRequest(new NetworkTest()),       "Network Test",          "Admin");
    Request.addToNavbar(registerRequest(new IOStatus()),          "Cluster I/O",           "Admin");
    Request.addToNavbar(registerRequest(new Timeline()),          "Timeline",              "Admin");
    Request.addToNavbar(registerRequest(new UDPDropTest()),       "UDP Drop Test",         "Admin");
    Request.addToNavbar(registerRequest(new TaskStatus()),        "Task Status",           "Admin");
    Request.addToNavbar(registerRequest(new Shutdown()),          "Shutdown",              "Admin");

    // Help and Tutorials
    Request.addToNavbar(registerRequest(new Documentation()),       "H2O Documentation",      "Help", USE_NEW_TAB);
    Request.addToNavbar(registerRequest(new Tutorials()),           "Tutorials Home",         "Help", USE_NEW_TAB);
    Request.addToNavbar(registerRequest(new TutorialGBM()),         "GBM Tutorial",           "Help", USE_NEW_TAB);
    Request.addToNavbar(registerRequest(new TutorialDeepLearning()),"Deep Learning Tutorial", "Help", USE_NEW_TAB);
    Request.addToNavbar(registerRequest(new TutorialRFIris()),      "Random Forest Tutorial", "Help", USE_NEW_TAB);
    Request.addToNavbar(registerRequest(new TutorialGLMProstate()), "GLM Tutorial",           "Help", USE_NEW_TAB);
    Request.addToNavbar(registerRequest(new TutorialKMeans()),      "KMeans Tutorial",        "Help", USE_NEW_TAB);
    Request.addToNavbar(registerRequest(new AboutH2O()),            "About H2O",              "Help");

    // Beta things should be reachable by the API and web redirects, but not put in the menu.
    if(H2O.OPT_ARGS.beta == null) {
      registerRequest(new hex.LR2());
      registerRequest(new ReBalance());
      registerRequest(new NFoldFrameExtractPage());
      registerRequest(new Console());
      registerRequest(new GapStatistic());
      registerRequest(new InsertMissingValues());
      registerRequest(new KillMinus3());
      registerRequest(new SaveModel());
      registerRequest(new LoadModel());
      registerRequest(new CollectLinuxInfo());
      registerRequest(new SetLogLevel());
      registerRequest(new Debug());
      registerRequest(new UnlockKeys());
    } else {
      Request.addToNavbar(registerRequest(new hex.LR2()),              "Linear Regression2",          "Beta");
      Request.addToNavbar(registerRequest(new ReBalance()),            "ReBalance",                   "Beta");
      Request.addToNavbar(registerRequest(new NFoldFrameExtractPage()),"N-Fold Frame Extract",        "Beta");
      Request.addToNavbar(registerRequest(new Console()),              "Console",                     "Beta");
      Request.addToNavbar(registerRequest(new GapStatistic()),         "Gap Statistic",               "Beta");
      Request.addToNavbar(registerRequest(new InsertMissingValues()),  "Insert Missing Values",       "Beta");
      Request.addToNavbar(registerRequest(new KillMinus3()),           "Kill Minus 3",                "Beta");
      Request.addToNavbar(registerRequest(new SaveModel()),            "Save Model",                  "Beta");
      Request.addToNavbar(registerRequest(new LoadModel()),            "Load Model",                  "Beta");
      Request.addToNavbar(registerRequest(new CollectLinuxInfo()),     "Collect Linux Info",          "Beta");
      Request.addToNavbar(registerRequest(new SetLogLevel()),          "Set Log Level",               "Beta");
      Request.addToNavbar(registerRequest(new Debug()),                "Debug Dump (floods log file)","Beta");
      Request.addToNavbar(registerRequest(new UnlockKeys()),           "Unlock Keys (use with caution)","Beta");
    }

    registerRequest(new Up());
    registerRequest(new Get()); // Download
    //Column Expand
    registerRequest(new OneHot());
    // internal handlers
    //registerRequest(new StaticHTMLPage("/h2o/CoefficientChart.html","chart"));
    registerRequest(new Cancel());
    registerRequest(new CoxPHModelView());
    registerRequest(new CoxPHProgressPage());
    registerRequest(new DomainMapping());
    registerRequest(new DRFModelView());
    registerRequest(new DRFProgressPage());
    registerRequest(new DownloadDataset());
    registerRequest(new Exec2());
    registerRequest(new GBMModelView());
    registerRequest(new GBMProgressPage());
    registerRequest(new GridSearchProgress());
    registerRequest(new LogView.LogDownload());
    registerRequest(new NeuralNetModelView());
    registerRequest(new NeuralNetProgressPage());
    registerRequest(new DeepLearningModelView());
    registerRequest(new DeepLearningProgressPage());
    registerRequest(new KMeans2Progress());
    registerRequest(new KMeans2ModelView());
    registerRequest(new NBProgressPage());
    registerRequest(new GapStatisticProgressPage());
    registerRequest(new NBModelView());
    registerRequest(new GapStatisticModelView());
    registerRequest(new PCAProgressPage());
    registerRequest(new PCAModelView());
    registerRequest(new PostFile());
    registerRequest(new water.api.Upload2.PostFile());
    registerRequest(new Progress2());
    registerRequest(new PutValue());
    registerRequest(new Remove());
    registerRequest(new RemoveAll());
    registerRequest(new RemoveAck());
    registerRequest(new SpeeDRFModelView());
    registerRequest(new SpeeDRFProgressPage());
    registerRequest(new water.api.SetColumnNames2());     // Set colnames for FluidVec objects
    registerRequest(new LogAndEcho());
    registerRequest(new ToEnum2());
    registerRequest(new ToInt2());
    registerRequest(new GLMProgress());
    registerRequest(new hex.glm.GLMGridProgress());
    registerRequest(new water.api.Levels2());    // Temporary hack to get factor levels efficiently
    // Typeahead
    registerRequest(new TypeaheadModelKeyRequest());
    registerRequest(new TypeaheadPCAModelKeyRequest());
    registerRequest(new TypeaheadHexKeyRequest());
    registerRequest(new TypeaheadFileRequest());
    registerRequest(new TypeaheadHdfsPathRequest());
    registerRequest(new TypeaheadKeysRequest("Existing H2O Key", "", null));
    registerRequest(new TypeaheadS3BucketRequest());
    // testing hooks
    registerRequest(new TestPoll());
    registerRequest(new TestRedirect());
//    registerRequest(new GLMProgressPage2());
    registerRequest(new GLMModelView());
    registerRequest(new GLMModelUpdate());
    registerRequest(new GLMGridView());
//    registerRequest(new GLMValidationView());
    registerRequest(new LaunchJar());
    Request.initializeNavBar();

    // Pure APIs, no HTML, to support The New World
    registerRequest(new Models());
    registerRequest(new Frames());
    registerRequest(new ModelMetrics());

    // WaterMeter support APIs
    registerRequest(new WaterMeterPerfbar.WaterMeterCpuTicks());
  }

  /**
   * Registers the request with the request server.
   */
  public static Request registerRequest(Request req) {
    assert req.supportedVersions().length > 0;
    for (API_VERSION ver : req.supportedVersions()) {
      String href = req.href(ver);
      assert (! _requests.containsKey(href)) : "Request with href "+href+" already registered";
      _requests.put(href,req);
      req.registered(ver);
    }
    return req;
  }

  /** Removes the request from the dispatch table for every version it supports. */
  public static void unregisterRequest(Request req) {
    for (API_VERSION ver : req.supportedVersions()) {
      String href = req.href(ver);
      _requests.remove(href);
    }
  }

  /** Registers the request with the request server.
  */
  // HTTP-method-specific registration helpers; all funnel into register().
  public static String registerGET   (String url, Class hclass, String hmeth) { return register("GET"   ,url,hclass,hmeth); }
  public static String registerPUT   (String url, Class hclass, String hmeth) { return register("PUT"   ,url,hclass,hmeth); }
  public static String registerDELETE(String url, Class hclass, String hmeth) { return register("DELETE",url,hclass,hmeth); }
  public static String registerPOST  (String url, Class hclass, String hmeth) { return register("POST"  ,url,hclass,hmeth); }

  /** Maps method+url to the named no-arg handler method on hclass. */
  private static String register(String method, String url, Class hclass, String hmeth) {
    try {
      assert lookup(method,url)==null; // Not shadowed
      Method meth = hclass.getDeclaredMethod(hmeth);
      _handlers.put(method+url,meth);
      return url;
    } catch( NoSuchMethodException nsme ) {
      // NOTE(review): original cause is dropped; consider new Error(..., nsme).
      throw new Error("NoSuchMethodException: "+hclass.getName()+"."+hmeth);
    }
  }

  // Lookup the method/url in the register list, and return a matching Method
  // NOTE(review): matches with equals() only, so URLs registered as regexes
  // (e.g. "/1/metadata/modelbuilders/.*") are never matched.
  private static Method lookup( String method, String url ) {
    String s = method+url;
    for( String x : _handlers.keySet() )
      if( x.equals(s) ) // TODO: regex
        return _handlers.get(x);
    return null;
  }

  // Handling ------------------------------------------------------------------
  /** Instantiates the handler class owning meth and invokes it for the parsed
   *  request; the request type only selects the response flavor.
   *  NOTE(review): local "Schema S" is declared but never used. */
  private Schema handle( Request.RequestType type, Method meth, int version, Properties parms ) throws Exception {
    Schema S;
    switch( type ) {
    // case html: // These request-types only dictate the response-type;
    case java:   // the normal action is always done.
    case json:
    case xml: {
      Class x = meth.getDeclaringClass();
      Class<Handler> clz = (Class<Handler>)x;
      Handler h = clz.newInstance();
      return h.handle(version,meth,parms); // Can throw any Exception the handler throws
    }
    case query:
    case help:
    default:
      throw H2O.unimpl();
    }
  }

  /** Wraps the Schema into an HTTP Response in the requested flavor (JSON only for now).
   *  NOTE(review): new String(byte[]) uses the platform charset; should specify UTF-8. */
  private Response wrap( String http_code, Schema S, RequestStatics.RequestType type ) {
    // Convert Schema to desired output flavor
    switch( type ) {
    case json:   return new Response(http_code, MIME_JSON, new String(S.writeJSON(new AutoBuffer()).buf()));
    /*
    case xml:  //return new Response(http_code, MIME_XML , new String(S.writeXML (new AutoBuffer()).buf()));
    case java:
      throw H2O.unimpl();
    case html: {
      RString html = new RString(_htmlTemplate);
      html.replace("CONTENTS", S.writeHTML(new water.util.DocGen.HTML()).toString());
      return new Response(http_code, MIME_HTML, html.toString());
    }
    */
    default:
      throw H2O.fail();
    }
  }

  // Keep spinning until we get to launch the NanoHTTPD
  public static void start() {
    new Thread( new Runnable() {
        @Override public void run()  {
          while( true ) {
            try {
              // Try to get the NanoHTTP daemon started
              SERVER = new RequestServer(H2O._apiSocket);
              break;
            } catch( Exception ioe ) {
              Log.err(Sys.HTTPD,"Launching NanoHTTP server got ",ioe);
              // NOTE(review): InterruptedException is swallowed without
              // re-interrupting the thread (Thread.currentThread().interrupt()).
              try { Thread.sleep(1000); } catch( InterruptedException e ) { } // prevent denial-of-service
            }
          }
        }
      }, "Request Server launcher").start();
  }

  /** Rewrites special URIs: "/" becomes the Tutorials page, and R package
   *  download paths get the R version Jenkins built against spliced in.
   *  NOTE(review): the Pattern is recompiled on every call; could be a
   *  static final field. */
  public static String maybeTransformRequest (String uri) {
    if (uri.isEmpty() || uri.equals("/")) {
      return "/Tutorials.html";
    }

    Pattern p = Pattern.compile("/R/bin/([^/]+)/contrib/([^/]+)(.*)");
    Matcher m = p.matcher(uri);
    boolean b = m.matches();
    if (b) {
      // On Jenkins, this command sticks his own R version's number
      // into the package that gets built.
      //
      //     R CMD INSTALL -l $(TMP_BUILD_DIR) --build h2o-package
      //
      String versionOfRThatJenkinsUsed = "3.0";

      String platform = m.group(1);
      String version = m.group(2);
      String therest = m.group(3);
      String s = "/R/bin/" + platform + "/contrib/" + versionOfRThatJenkinsUsed + therest;
      return s;
    }

    return uri;
  }

  // uri serve -----------------------------------------------------------------
  /** Logs the request line plus its parameters; repetitive endpoints (static
   *  assets, polling pages) only go to the no-stdout log, everything else is
   *  logged a second time to the main log. */
  void maybeLogRequest (String uri, String method, Properties parms) {
    boolean filterOutRepetitiveStuff = true;

    String log = String.format("%-4s %s", method, uri);
    for( Object arg : parms.keySet() ) {
      String value = parms.getProperty((String) arg);
      if( value != null && value.length() != 0 )
        log += " " + arg + "=" + value;
    }
    Log.info_no_stdout(Sys.HTLOG, log);

    if (filterOutRepetitiveStuff) {
      if (uri.endsWith(".css")) return;
      if (uri.endsWith(".js")) return;
      if (uri.endsWith(".png")) return;
      if (uri.endsWith(".ico")) return;
      if (uri.startsWith("/Typeahead")) return;
      if (uri.startsWith("/2/Typeahead")) return;
      if (uri.endsWith("LogAndEcho.json")) return;
      if (uri.startsWith("/Cloud.json")) return;
      if (uri.contains("Progress")) return;
      if (uri.startsWith("/Jobs.json")) return;
      if (uri.startsWith("/Up.json")) return;
      if (uri.startsWith("/2/WaterMeter")) return;
    }

    Log.info(Sys.HTTPD, log);
  }

  ///////// Stuff for URL parsing brought over from H2O2:

  /** Returns the name of the request, that is the request url without the
   *  request suffix.  E.g. converts "/GBM.html/crunk" into "/GBM/crunk" */
  String requestName(String url) {
    String s = "."+toString();
    int i = url.indexOf(s);
    if( i== -1 ) return url; // No, or default, type
    return url.substring(0,i)+url.substring(i+s.length());
  }

  // Parse version number.  Java has no ref types, bleah, so return the version
  // number and the "parse pointer" by shift-by-16 compaction.
// /1/xxx --> version 1 // /2/xxx --> version 2 // /v1/xxx --> version 1 // /v2/xxx --> version 2 // /latest/xxx--> LATEST_VERSION // /xxx --> LATEST_VERSION private int parseVersion( String uri ) { if( uri.length() <= 1 || uri.charAt(0) != '/' ) // If not a leading slash, then I am confused return (0<<16)|LATEST_VERSION; if( uri.startsWith("/latest") ) return (("/latest".length())<<16)|LATEST_VERSION; int idx=1; // Skip the leading slash int version=0; char c = uri.charAt(idx); // Allow both /### and /v### if( c=='v' ) c = uri.charAt(++idx); while( idx < uri.length() && '0' <= c && c <= '9' ) { version = version*10+(c-'0'); c = uri.charAt(++idx); } if( idx > 10 || version > LATEST_VERSION || version < 1 || uri.charAt(idx) != '/' ) return (0<<16)|LATEST_VERSION; // Failed number parse or baloney version // Happy happy version return (idx<<16)|version; } @Override public NanoHTTPD.Response serve( String uri, String method, Properties header, Properties parms ) { // Jack priority for user-visible requests Thread.currentThread().setPriority(Thread.MAX_PRIORITY-1); // update arguments and determine control variables uri = maybeTransformRequest(uri); // determine the request type Request.RequestType type = Request.RequestType.requestType(uri); String requestName = type.requestName(uri); maybeLogRequest(uri, method, parms); // determine version int version = parseVersion(uri); int idx = version>>16; version &= 0xFFFF; String uripath = uri.substring(idx); String path = requestName(uripath); // Strip suffix type from middle of URI Method meth = null; try { // Find handler for url meth = lookup(method,path); if (meth != null) { return wrap(HTTP_OK,handle(type,meth,version,parms),type); } } catch( IllegalArgumentException e ) { return wrap(HTTP_BADREQUEST,new HTTP404V1(e.getMessage(),uri),type); } catch( Exception e ) { // make sure that no Exception is ever thrown out from the request return wrap(e.getMessage()!="unimplemented"? 
HTTP_INTERNALERROR : HTTP_NOTIMPLEMENTED, new HTTP500V1(e),type); } // Wasn't a new type of handler: try { // determine if we have known resource Request request = _requests.get(requestName); // if the request is not know, treat as resource request, or 404 if not // found if (request == null) return getResource(uri); // Some requests create an instance per call request = request.create(parms); // call the request return request.serve(this,parms,type); } catch( Exception e ) { if(!(e instanceof ExpectedExceptionForDebug)) e.printStackTrace(); // make sure that no Exception is ever thrown out from the request parms.setProperty(Request.ERROR,e.getClass().getSimpleName()+": "+e.getMessage()); return _http500.serve(this,parms,type); } } private RequestServer( ServerSocket socket ) throws IOException { super(socket,null); } // Resource loading ---------------------------------------------------------- // Returns the response containing the given uri with the appropriate mime // type. private NanoHTTPD.Response getResource(String uri) { byte[] bytes = _cache.get(uri); if( bytes == null ) { InputStream resource = Boot._init.getResource2(uri); if (resource != null) { try { bytes = ByteStreams.toByteArray(resource); } catch( IOException e ) { Log.err(e); } byte[] res = _cache.putIfAbsent(uri,bytes); if( res != null ) bytes = res; // Racey update; take what is in the _cache } Closeables.closeQuietly(resource); } if ((bytes == null) || (bytes.length == 0)) { // make sure that no Exception is ever thrown out from the request Properties parms = new Properties(); parms.setProperty(Request.ERROR,uri); return _http404.serve(this,parms,Request.RequestType.www); } String mime = NanoHTTPD.MIME_DEFAULT_BINARY; if (uri.endsWith(".css")) mime = "text/css"; else if (uri.endsWith(".html")) mime = "text/html"; // return new NanoHTTPD.Response(NanoHTTPD.HTTP_OK,mime,new ByteArrayInputStream(bytes)); NanoHTTPD.Response res = new NanoHTTPD.Response(NanoHTTPD.HTTP_OK,mime,new 
ByteArrayInputStream(bytes)); res.addHeader("Content-Length", Long.toString(bytes.length)); // res.addHeader("Content-Disposition", "attachment; filename=" + uri); return res; } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/RequestStatics.java
package water.api;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.Map;
import java.util.regex.Pattern;

import water.util.Log;

import dontweave.gson.JsonElement;
import dontweave.gson.JsonObject;

/** All statics for the Request api.
 *
 * Especially the JSON property names should be defined here. Some helper
 * functions too.
 *
 * @author peta
 */
public class RequestStatics extends Constants {
  /** Each request name is derived from name of class serving the request. */
  public final String requestName() {
    return getClass().getSimpleName();
  }

  /** Request type.
   *
   * Requests can have multiple types. Basic types include the plain json type
   * in which the result is returned as a JSON object, a html type that acts as
   * the webpage, or the help type that displays the extended help for the
   * request.
   *
   * The wiki type is also added that displays the markup of the wiki that
   * should be used to document the request as per Matt's suggestion.
   *
   * NOTE the requests are distinguished by their suffixes. Please make the
   * suffix start with the dot character to avoid any problems with request
   * names.
   */
  public static enum RequestType {
    json(".json"),   ///< json type request, a result is a JSON structure
    www(".html"),    ///< webpage request
    help(".help"),   ///< should display the help on the given request
    query(".query"), ///< Displays the query for the argument in html mode
    png(".png"),     ///< image, e.g. plot
    txt(".txt"),     ///< text, e.g. a script
    java(".java"),   ///< java program
    xml(".xml"),     ///< xml request
    ;
    /** Suffix of the request - extension of the URL. */
    public final String _suffix;

    RequestType(String suffix) {
      _suffix = suffix;
    }

    /** Returns the request type of a given URL. JSON request type is the default
     * type when the extension from the URL cannot be determined.
     */
    public static RequestType requestType(String requestUrl) {
      if (requestUrl.endsWith(www._suffix))   return www;
      if (requestUrl.endsWith(help._suffix))  return help;
      if (requestUrl.endsWith(query._suffix)) return query;
      if (requestUrl.endsWith(png._suffix))   return png;
      if (requestUrl.endsWith(txt._suffix))   return txt;
      if (requestUrl.endsWith(java._suffix))  return java;
      if (requestUrl.endsWith(xml._suffix))   return xml;
      return json;
    }

    /** Returns the name of the request, that is the request url without the
     * request suffix.
     */
    public String requestName(String requestUrl) {
      String result = (requestUrl.endsWith(_suffix)) ? requestUrl.substring(0, requestUrl.length()-_suffix.length()) : requestUrl;
      return result;
    }
  }

  /** Returns the name of the JSON property pretty printed. That is spaces
   * instead of underscores and capital first letter.
   * @param name JSON property name
   * @return pretty-printed name; only "row" additionally gets its first
   *         letter upper-cased
   */
  public static String JSON2HTML(String name) {
    if( name.length() < 1 ) return name;
    // BUG FIX: was 'name == "row"' — a reference comparison that only matches
    // interned string literals; use equals() for a value comparison.
    if( "row".equals(name) ) {
      return name.substring(0,1).toUpperCase()+ name.replace("_"," ").substring(1);
    }
    return name.substring(0,1)+name.replace("_"," ").substring(1);
  }

  /** Mangles an arbitrary string into a valid JSON identifier: lower-cases
   * identifier characters, maps whitespace and '-' to '_', drops everything
   * else, and prefixes "x" if the result would not start with an identifier
   * character.  Note: operates byte-wise, so multi-byte characters are
   * filtered out rather than transliterated. */
  public static String Str2JSON( String x ) {
    if( checkJsonName(x) ) return x;
    StringBuilder sb = new StringBuilder();
    byte[] bs = x.getBytes();
    if( bs.length==0 || !Character.isJavaIdentifierStart(bs[0]) ) sb.append("x");
    for( byte b : bs )
      if( Character.isJavaIdentifierPart(b) ) sb.append(Character.toLowerCase((char)b));
      else if( Character.isWhitespace(b) ) sb.append('_');
      else if( b=='-' ) sb.append('_');
    String s = sb.toString();
    assert checkJsonName(s);
    return s;
  }

  private static Pattern _correctJsonName = Pattern.compile("^[_a-z][_a-z0-9]*$");

  /** Checks if the given JSON name is valid. A valid JSON name is a sequence of
   * small letters, numbers and underscores that does not start with number.
   */
  public static boolean checkJsonName(String name) {
    return _correctJsonName.matcher(name).find();
  }

  /** Builds a one-property JSON object carrying an error message. */
  protected static JsonObject jsonError(String error) {
    JsonObject result = new JsonObject();
    result.addProperty(ERROR, error);
    return result;
  }

  /** URL-encodes redirect arguments.  Either a JsonObject of key/value pairs
   * or a flat Object[] of alternating field-name/value pairs may be given;
   * args2 takes precedence when both are non-null. */
  protected static String encodeRedirectArgs(JsonObject args, Object[] args2) {
    if( args == null && args2 == null ) return "";
    if( args2 != null ) {
      StringBuilder sb = new StringBuilder();
      // Must be field-name / value pairs, hence an even count.
      // (Fixed assert text: the condition checks evenness, not a power of 2.)
      assert (args2.length &1)==0 : "Number of arguments should be even.";
      for( int i=0; i<args2.length; i+=2 ) {
        sb.append(i==0?'?':'&').append(args2[i]).append('=');
        try {
          sb.append(URLEncoder.encode(args2[i+1].toString(),"UTF-8"));
        } catch( UnsupportedEncodingException ex ) {
          throw Log.errRTExcept(ex);
        }
      }
      return sb.toString();
    }

    StringBuilder sb = new StringBuilder();
    sb.append("?");
    for (Map.Entry<String,JsonElement> entry : args.entrySet()) {
      JsonElement e = entry.getValue();
      if (sb.length()!=1)
        sb.append("&");
      sb.append(entry.getKey());
      sb.append("=");
      try {
        sb.append(URLEncoder.encode(e.getAsString(),"UTF-8"));
      } catch( UnsupportedEncodingException ex ) {
        throw Log.errRTExcept(ex);
      }
    }
    return sb.toString();
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/SaveModel.java
package water.api; import static water.util.FSUtils.isHdfs; import static water.util.FSUtils.isS3N; import java.io.File; import java.io.IOException; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import water.Func; import water.Model; import water.persist.PersistHdfs; import water.serial.Model2FileBinarySerializer; import water.serial.Model2HDFSBinarySerializer; import water.util.FSUtils; public class SaveModel extends Func { static final int API_WEAVER = 1; static public DocGen.FieldDoc[] DOC_FIELDS; @API(help = "Model to save.", required=true, filter=Default.class) Model model; @API(help = "Name of file to save the model.", required = true, filter = Default.class, json=true, gridable = false) String path; @API(help="Overwrite existing files.", required = false, filter = Default.class, gridable = false) boolean force = false; @Override protected void execImpl() { if (isHdfs(path) || isS3N(path)) saveToHdfs(); else saveToLocalFS(); } private void saveToLocalFS() { File f = new File(path); if (!force && f.exists()) throw new IllegalArgumentException("The file " + path + " already exists!"); try { new Model2FileBinarySerializer().save(model, new File(path)); } catch( IOException e ) { throw new IllegalArgumentException("Cannot save file " + path, e); } } private void saveToHdfs() { if (FSUtils.isBareS3NBucketWithoutTrailingSlash(path)) { path += "/"; } Path f = new Path(path); try { FileSystem fs = FileSystem.get(f.toUri(), PersistHdfs.CONF); new Model2HDFSBinarySerializer(fs, force).save(model, f); } catch( IOException e ) { throw new IllegalArgumentException("Cannot save file " + path, e); } } @Override public boolean toHTML(StringBuilder sb) { sb.append("<div class=\"alert alert-success\">") .append("Model ") .append(Inspector.link(model._key.toString(), model._key.toString())) .append(" was sucessfuly saved to <b>"+path+"</b> file."); sb.append("</div>"); return true; } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/SetColumnNames2.java
package water.api; import water.*; import water.fvec.Frame; import water.util.RString; public class SetColumnNames2 extends Request2 { static final int API_WEAVER=1; // This file has auto-gen'd doc & json fields static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code. // This Request supports the HTML 'GET' command, and this is the help text // for GET. static final String DOC_GET = "Sets the column names of a frame."; @API(help="An existing H2O Frame key.", required=true, filter=Default.class) Frame source; class colsFilter1 extends MultiVecSelect { public colsFilter1() { super("source");} } @API(help = "Select columns", filter=colsFilter1.class) int[] cols; @API(help="Column names as a parsed frame.", filter=Default.class) Frame copy_from; @API(help="Column names as a vector of strings.", gridable=false, filter=Default.class) String comma_separated_list; public static String link(Key k, String content) { RString rs = new RString("<a href='SetColumnNames2.query?source=%$key'>"+content+"</a>"); rs.replace("key", k.toString()); return rs.toString(); } @Override protected Response serve() { if( source == null ) return RequestServer._http404.serve(); // select all columns by default if( cols == null ) { cols = new int[source.vecs().length]; for(int i = 0; i < cols.length; i++) cols[i] = i; } if(comma_separated_list == null && copy_from == null) throw new IllegalArgumentException("No column names given"); else if(comma_separated_list != null && copy_from != null) throw new IllegalArgumentException("Cannot specify column names as both frame and list."); String[] names_str = comma_separated_list == null ? 
copy_from.names() : comma_separated_list.split(","); if(cols.length != names_str.length) throw new IllegalArgumentException("number of columns don't match!"); for(int i = 0; i < cols.length; i++) source._names[cols[i]] = names_str[i]; Futures fs = new Futures(); DKV.put(source._key, source, fs); fs.blockForPending(); return Inspect2.redirect(this, source._key.toString()); } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/SetLogLevel.java
package water.api; import water.*; import water.api.RequestServer.API_VERSION; import water.util.Log; public class SetLogLevel extends Func { static final int API_WEAVER = 1; // This file has auto-gen'd doc & json fields static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code. // This Request supports the HTML 'GET' command, and this is the help text for GET. static final String DOC_GET = "Set runtime log4j log level"; @API(help = "The new log level (1 == trace, 2 == debug, 3 == info, 4 == warn, 5 == error, 6 == fatal)", required = true, filter = Default.class, lmin = 1, lmax = 6) public int log_level = 1; private static class SetLogLevelTask extends DRemoteTask { public int _log_level; @Override public void lcompute() { Log.setLogLevel(_log_level); tryComplete(); } @Override public void reduce(DRemoteTask drt) {} } @Override protected void execImpl() { SetLogLevelTask task = new SetLogLevelTask(); task._log_level = log_level; task.invokeOnAllNodes(); } @Override public API_VERSION[] supportedVersions() { return SUPPORTS_ONLY_V2; } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/Shutdown.java
package water.api; import java.util.TimerTask; import water.H2O; import water.UDPRebooted; import dontweave.gson.JsonObject; public class Shutdown extends Request { public Shutdown() { _requestHelp = "Shutdown the cloud."; } @Override public Response serve() { java.util.Timer t = new java.util.Timer("Shutdown Timer"); t.schedule(new TimerTask() { @Override public void run() { UDPRebooted.T.shutdown.send(H2O.SELF); H2O.exit(0); } }, 100); JsonObject json = new JsonObject(); json.addProperty(STATUS, "shutting down"); return Response.done(json); } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/StaticHTMLPage.java
package water.api; import water.Boot; public class StaticHTMLPage extends HTMLOnlyRequest { private final String _html; private final String _href; public StaticHTMLPage(String file, String href) { _href = href; _html = Boot._init.loadContent(file); } @Override protected String build(Response response) { return _html; } @Override public String href() { return _href; } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/Steam.java
package water.api; /** * Redirect to online documentation page. */ public class Steam extends HTMLOnlyRequest { protected String build(Response response) { return "<meta http-equiv=\"refresh\" content=\"0; url=steam/index.html\">"; } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/StoreView.java
package water.api;

import dontweave.gson.*;
import java.util.*;
import water.*;
import water.fvec.Frame;
import water.fvec.Vec;

/** Paginated view of the keys currently in the distributed K/V store,
 *  optionally filtered by a substring match on the key name. */
public class StoreView extends Request {
  public static final int MAX_VIEW = 1000000;
  protected Str _filter = new Str(FILTER, "");
  protected final Int _offset = new Int(OFFSET, 0, 0, Integer.MAX_VALUE);
  protected final Int _view = new Int(VIEW, 20, 0, MAX_VIEW);

  @Override protected Response serve() {
    JsonObject result = new JsonObject();
    // get the offset index
    final int offset = _offset.value();
    final int view = _view.value();
    final String filter = _filter.value();
    // Gather some keys that pass all filters
    H2O.KeySnapshot ks = H2O.KeySnapshot.globalSnapshot();
    if(filter != null)
      ks = ks.filter(new H2O.KVFilter() {
        @Override public boolean filter(H2O.KeyInfo k) { return k._key.toString().indexOf(filter) != -1; }
      });
    final H2O.KeyInfo [] kinfos = ks._keyInfos;
    if( ks._keyInfos.length > offset+view ) result.addProperty(Constants.MORE,true);
    // Now build the result JSON with all available keys
    final H2O cloud = H2O.CLOUD; // Current eldest Cloud
    JsonArray ary = new JsonArray();
    int len = Math.min(kinfos.length,offset+view);
    for( int i=offset; i<len; i++ ) {
      Value val = DKV.get(kinfos[i]._key);
      if( val != null )
        ary.add(formatKeyRow(cloud,kinfos[i]._key,val));
    }
    result.add(KEYS,ary);
    result.addProperty(NUM_KEYS, len-offset);
    result.addProperty(CLOUD_NAME, H2O.NAME);
    result.addProperty(NODE_NAME, H2O.SELF.toString());
    Response r = Response.done(result);
    r.addHeader(
        "<form class='well form-inline' action='StoreView.html'>" +
        " <input type='text' class='input-small span10' placeholder='filter' " +
        " name='filter' id='filter' value='"+_filter.value()+"' maxlength='512'>" +
        " <button type='submit' class='btn btn-primary'>Filter keys!</button>" +
        "</form>");
    r.setBuilder(KEYS, new PaginatedTable(argumentsToJson(),offset,view,kinfos.length,false));
    r.setBuilder(KEYS+"."+KEY, new KeyCellBuilder());
    r.setBuilder(KEYS+".col_0", new KeyMinAvgMaxBuilder());
    r.setBuilder(KEYS+".col_1", new KeyMinAvgMaxBuilder());
    r.setBuilder(KEYS+".col_2", new KeyMinAvgMaxBuilder());
    r.setBuilder(KEYS+".col_3", new KeyMinAvgMaxBuilder());
    r.setBuilder(KEYS+".col_4", new KeyMinAvgMaxBuilder());
    r.setBuilder(MORE, new HideBuilder());
    return r;
  }

  /** Renders non-finite doubles as the empty string for display. */
  static private String noNaN( double d ) {
    return (Double.isNaN(d) || Double.isInfinite(d)) ? "" : Double.toString(d);
  }

  // Used by tests
  public String setAndServe(String offset) {
    _offset.reset(); _offset.check(null,offset);
    _view  .reset(); _view  .check(null,"20");
    _filter.reset();
    return new Gson().toJson(serve()._response);
  }

  /** Builds the JSON row for one key: size, row/col counts and per-column
   *  min/mean/max for the first few columns of a Frame value. */
  private JsonObject formatKeyRow(H2O cloud, Key key, Value val) {
    JsonObject result = new JsonObject();
    result.addProperty(KEY, key.toString());
    result.addProperty(VALUE_SIZE,val.length());
    JsonObject mt = new JsonObject();
    JsonObject jcols[] = new JsonObject[]{mt,mt,mt,mt,mt};
    long rows = -1;
    int cols = -1;
    String str = "";
    if(val.isFrame()){
      Frame fr = val.get();
      rows = fr.numRows();
      cols = fr.numCols();
      result.addProperty(ROWS,rows); // exact rows
      result.addProperty(COLS,cols); // exact cols
      for( int i = 0; i < jcols.length; ++i ) {
        JsonObject col = new JsonObject();
        if (i < cols) {
          Vec v = fr.vecs()[i];
          col.addProperty(HEADER,fr._names[i]);
          if( !v.isEnum()) {
            col.addProperty(MIN , noNaN(v.min() ));
            col.addProperty(MEAN, noNaN(v.mean()));
            col.addProperty(MAX , noNaN(v.max() ));
          } else if( v.domain().length > 0 ) {
            int max = v.domain().length;
            col.addProperty(MIN , v.domain()[0]);
            col.addProperty(MEAN, v.domain()[max/2]);
            col.addProperty(MAX , v.domain()[max-1]);
          }
        }
        jcols[i] = col;
      }
    }
    // A parse-guessing block used to live here for non-Frame values; it was
    // disabled because it corrupted StoreView output.  Empty strings stand in
    // for its ROWS/COLS output.
    // BUG FIX: the empty-string fallback was emitted unconditionally, which
    // (since JsonObject.addProperty replaces an existing member) clobbered the
    // exact ROWS/COLS written in the isFrame() branch above.  Restore the
    // rows == -1 guard the removed code had.
    if( rows == -1 ) {
      result.addProperty(ROWS,""); // no row count available
      result.addProperty(COLS,"");
    }
    for( int i=0; i<jcols.length; i++ )
      result.add("col_"+i,jcols[i]);
    result.addProperty(VALUE,str); // VALUE last in the JSON
    return result;
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/SummaryPage2.java
package water.api;

import hex.Summary2;
import water.*;
import water.util.Log;
import water.util.RString;
import water.fvec.*;
import water.util.Utils;

import java.util.Iterator;

/** Returns a summary (quantiles, histograms, basic stats) of a fluid-vec frame. */
public class SummaryPage2 extends Request2 {
  static final int API_WEAVER=1; // This file has auto-gen'd doc & json fields
  static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code.

  // This Request supports the HTML 'GET' command, and this is the help text
  // for GET.
  static final String DOC_GET = "Returns a summary of a fluid-vec frame";

  @API(help="An existing H2O Frame key.", required=true, filter=Default.class)
  Frame source;

  class colsFilter1 extends MultiVecSelect { public colsFilter1() { super("source");} }
  @API(help = "Select columns", filter=colsFilter1.class)
  int[] cols;

  @API(help = "Maximum columns to show summaries of", filter = Default.class, lmin = 1)
  int max_ncols = 1000;

  @API(help = "Number of bins for quantile (1-10000000)", filter = Default.class, lmin = 1, lmax = 1000000)
  int max_qbins = 1000;

  @API(help = "Column summaries.")
  Summary2[] summaries;

  /** Builds an HTML link to this request's query page for key {@code k}. */
  public static String link(Key k, String content) {
    RString rs = new RString("<a href='SummaryPage2.query?source=%$key'>"+content+"</a>");
    rs.replace("key", k.toString());
    return rs.toString();
  }

  @Override protected Response serve() {
    if( source == null ) return RequestServer._http404.serve();
    // select all columns by default (capped at max_ncols)
    if( cols == null ) {
      cols = new int[Math.min(source.vecs().length,max_ncols)];
      for(int i = 0; i < cols.length; i++) cols[i] = i;
    }
    Vec[] vecs = new Vec[cols.length];
    String[] names = new String[cols.length];
    for (int i = 0; i < cols.length; i++) {
      vecs[i] = source.vecs()[cols[i]];
      names[i] = source._names[cols[i]];
    }
    Frame fr = new Frame(names, vecs);
    if(fr.numRows() == 0) throw new IllegalArgumentException("Data frame has zero rows!");
    // Make sure rollup stats are computed before summarizing.
    Futures fs = new Futures();
    for( Vec vec : vecs) vec.rollupStats(fs);
    fs.blockForPending();
    Summary2.BasicStat basicStats[] = new Summary2.PrePass().doAll(fr).finishUp()._basicStats;
    summaries = new Summary2.SummaryTask2(basicStats, max_qbins).doAll(fr)._summaries;
    if (summaries != null)
      for (int i = 0; i < cols.length; i++)
        summaries[i].finishUp(vecs[i]);
    return Response.done(this);
  }

  @Override public boolean toHTML( StringBuilder sb ) {
    // BUG FIX: was "<div class=container-fluid'>" — missing opening quote
    // produced malformed HTML.
    sb.append("<div class='container-fluid'>");
    sb.append("<div class='row-fluid'>");
    sb.append("<div class='span2' style='overflow-y:scroll;height:100%;left:0;position:fixed;text-align:right;overflow-x:scroll;'><h5>Columns</h5>");
    if (summaries != null && summaries.length > max_ncols)
      sb.append("<div class='alert'>Too many columns were selected. "+max_ncols+" of them are shown!</div>");
    StringBuilder innerPageBdr = null;
    if (summaries != null) {
      // Column summaries go into a separate scrollable pane; the fixed left
      // pane gets one anchor link per column.
      innerPageBdr = new StringBuilder("<div class='span10' style='float:right;height:90%;overflow-y:scroll'>");
      for( int i = 0; i < Math.min(summaries.length,max_ncols); i++) {
        String cname = source._names[cols[i]];
        Summary2 s2 = summaries[i];
        s2.toHTML(source.vecs()[cols[i]],cname,innerPageBdr);
        sb.append("<div><a href='#col_" + cname + "'>" + cname + "</a></div>");
      }
      innerPageBdr.append("</div>");
    }
    sb.append("</div>");
    sb.append("</div>");
    if (summaries != null) sb.append(innerPageBdr);
    sb.append("</div>");
    return true;
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/TaskStatus.java
package water.api;

import water.*;
import water.H2ONode.TaskInfo;
import water.H2ONode.task_status;

import java.util.Arrays;

/**
 * Debugging page: collects, from every node in the cloud, the set of tasks
 * each node has received from remotes plus the RPCs it still has pending,
 * and renders them as one HTML table per node.
 *
 * Created by tomasnykodym on 9/17/14.
 */
public class TaskStatus extends Request2 {
  static final int API_WEAVER = 1; // This file has auto-gen'd doc & json fields
  static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code.

  /** Per-node snapshot of task bookkeeping. */
  public static class NodeTaskInfo extends Iced {
    static final int API_WEAVER = 1; // This file has auto-gen'd doc & json fields
    static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code.
    @API(help="node name")
    final String _node;
    @API(help="tasks sent here by remote nodes")
    final TaskInfo[][] _remotes; // indexed by cloud-member index
    @API(help="pending tasks sent by me to remotes")
    final TaskInfo [] _pending;
    public NodeTaskInfo(TaskInfo [] pending, TaskInfo[][] ts) {
      _node = H2O.SELF.toString(); // always built on the local node
      _remotes = ts;
      _pending = pending;
    }
  }

  /** Distributed collector: each node fills in its own slot of _infos;
   *  reduce merges the slots so the caller ends with one entry per node. */
  public static class GetTaskInfo extends DRemoteTask<GetTaskInfo>{
    NodeTaskInfo [] _infos;
    @Override public void reduce(GetTaskInfo drt) {
      if(_infos == null) _infos = drt._infos;
      else {
        // Merge: keep our non-null slots, take the other side's for the rest.
        for(int i = 0; i < _infos.length; ++i){
          if(_infos[i] == null)
            _infos[i] = drt._infos[i];
        }
      }
    }
    @Override public void lcompute() {
      _infos = new NodeTaskInfo[H2O.CLOUD.size()];
      // Tasks other nodes have sent to this node, grouped by sender.
      TaskInfo [][] ts = new TaskInfo[H2O.CLOUD.size()][];
      int i = 0;
      for (H2ONode n : H2O.CLOUD._memary)
        ts[i++] = n.currentTasksInfo();
      // RPCs this node has issued that have not completed yet.
      RPC [] pendingRPCs = UDPTimeOutThread.pendingRPCs();
      TaskInfo [] pending = new TaskInfo[pendingRPCs.length];
      for(int j = 0; j < pendingRPCs.length; ++j)
        pending[j] = new TaskInfo(pendingRPCs[j].task(),pendingRPCs[j].taskNum(),pendingRPCs[j].target().index(), pendingRPCs[j].isDone()?task_status.DONE:task_status.CMP,pendingRPCs[j]._callCnt);
      _infos[H2O.SELF.index()] = new NodeTaskInfo(pending,ts);
      tryComplete();
    }
  }

  @API(help="task infos for each node")
  NodeTaskInfo[] _infos;

  @Override protected Response serve() {
    _infos = new GetTaskInfo().invokeOnAllNodes()._infos;
    return Response.done(this);
  }

  /** Renders one table per node: pending RPCs bucketed by target node,
   *  followed by the tasks received from each remote. */
  public boolean toHTML( StringBuilder sb ) {
    for(NodeTaskInfo x:_infos) {
      sb.append("<div>");
      sb.append("<h3>" + x._node + "</h3>");
      sb.append("<table class='table table-bordered table-condensed'>");
      for(int i = 0; i < H2O.CLOUD.size(); ++i){
        if(H2O.CLOUD._memary[i] == H2O.SELF) // skip self; we don't RPC ourselves
          continue;
        sb.append("<tr>");
        sb.append("<th>Pending[" + H2O.CLOUD._memary[i] + "]</th>");
        sb.append("<td>");
        for(TaskInfo ti:x._pending)
          if(ti.nodeId == i)
            sb.append(" " + ti.toString());
        sb.append("</td>");
        sb.append("</tr>");
      }
      int i = 0;
      for (TaskInfo[] ti : x._remotes) {
        sb.append("<tr>");
        sb.append("<th>Remote[" + H2O.CLOUD._memary[i++] + "]</th>");
        sb.append("<td>");
        sb.append(Arrays.deepToString(ti));
        sb.append("</td>");
        sb.append("</tr>");
      }
      sb.append("</table>");
      sb.append("</div>");
      // NOTE(review): closes a </p> that is never opened — harmless in
      // browsers but looks like leftover markup; confirm.
      sb.append("</p>");
    }
    return true;
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/TestPoll.java
package water.api; import dontweave.gson.JsonObject; public class TestPoll extends Request { // not isolated to the context, don't care public static volatile int _counter = 0; public final Str _haha = new Str("hoho"); @Override protected Response serve() { ++_counter; JsonObject resp = new JsonObject(); resp.addProperty("hoho",_haha.value()); resp.addProperty("counter",_counter); if (_counter == 10) { _counter = 0; return Response.done(resp); } else { return Response.poll(resp,_counter,10); } } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/TestRedirect.java
package water.api; import dontweave.gson.JsonObject; public class TestRedirect extends Request { @Override protected Response serve() { JsonObject resp = new JsonObject(); resp.addProperty("hoho","hehe"); return Response.redirect(resp, TestPoll.class,resp); } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/Timeline.java
package water.api;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;

import water.*;
import water.util.TimelineSnapshot;

import dontweave.gson.*;

/**
 * Debug page rendering a timeline of recent network traffic across the cloud.
 * Builds a JSON event list from a cluster-wide UDP/TCP snapshot; runs of
 * consecutive heartbeat packets are collapsed into a single summary row.
 */
public class Timeline extends Request {
  // JSON field names used in the response object.
  private static final String JSON_BYTES = "bytes";
  private static final String JSON_RECV = "recv";
  private static final String JSON_SEND = "send";
  private static final String JSON_DROP = "drop";
  private static final String JSON_NANOS = "nanos";
  private static final String JSON_TIME = "time";
  private static final String JSON_UDPTCP = "udp_tcp";
  private static final String JSON_RECVS = "recvs";
  private static final String JSON_SENDS = "sends";
  private static final String JSON_DROPS = "drops";
  private static final String JSON_CLOUD = "cloud";
  private static final String JSON_CLOUDS = "clouds";
  private static final String JSON_LAST_TIME = "lastTime";
  private static final String JSON_FIRST_TIME = "firstTime";
  private static final String JSON_TYPE = "type";
  private static final String JSON_SR = "sr";
  private static final String JSON_EVENTS = "events";
  private static final String JSON_SELF = "self";
  private static final String JSON_NOW = "now";

  public Timeline() {
    _requestHelp = "Display a timeline of recent network traffic for debugging";
  }

  /**
   * Takes a system-wide snapshot of recent network events and serializes them
   * to JSON, attaching an HTML table builder for the events array.
   */
  @Override public Response serve() {
    long ctm = System.currentTimeMillis();
    long[][] snapshot = TimeLine.system_snapshot();
    H2O cloud = TimeLine.CLOUD;
    TimelineSnapshot events = new TimelineSnapshot(cloud, snapshot);
    // NOTE(review): SimpleDateFormat is not thread-safe; a fresh instance per
    // request avoids sharing issues.
    SimpleDateFormat sdf = new SimpleDateFormat("HH:mm:ss:SSS");
    JsonObject resJson = new JsonObject();
    resJson.addProperty(JSON_NOW, sdf.format(new Date(ctm)));
    resJson.addProperty(JSON_SELF, H2O.SELF.toString());
    JsonArray eventsJson = new JsonArray();
    resJson.add(JSON_EVENTS, eventsJson);
    // Buffer of consecutive heartbeat events, flushed as one summary entry
    // when a non-heartbeat event is encountered.
    ArrayList<TimelineSnapshot.Event> heartbeats = new ArrayList();
    for( TimelineSnapshot.Event event : events ) {
      H2ONode h2o = cloud._memary[event._nodeId];
      // The event type.  First get payload.
      long l0 = event.dataLo();
      long h8 = event.dataHi();
      int udp_type = (int)(l0&0xff); // First byte is UDP packet type
      UDP.udp e = UDP.getUdp(udp_type);
      // Accumulate repeated heartbeats
      if( e == UDP.udp.heartbeat ) { heartbeats.add(event); continue; }
      // Now dump out accumulated heartbeats
      if( !heartbeats.isEmpty() ) {
        long firstMs = heartbeats.get(0).ms();
        long lastMs = heartbeats.get(heartbeats.size()-1).ms();
        int totalSends = 0;
        int totalRecvs = 0;
        int totalDrops = 0;
        // Per-node send/recv tallies, indexed by cloud member position.
        int[] sends = new int[cloud.size()];
        int[] recvs = new int[cloud.size()];
        for(TimelineSnapshot.Event h : heartbeats){
          if( h.isSend() ) {
            ++totalSends;
            ++sends[h._nodeId];
          } else if( h.isDropped() ) {
            ++totalDrops;
          } else {
            ++totalRecvs;
            ++recvs[h._nodeId];
          }
        }
        heartbeats.clear();
        // Single summary entry standing in for the whole heartbeat run.
        JsonObject hbJson = new JsonObject();
        eventsJson.add(hbJson);
        hbJson.addProperty(JSON_TYPE, "heartbeat");
        hbJson.addProperty(JSON_FIRST_TIME, sdf.format(new Date(firstMs)));
        hbJson.addProperty(JSON_LAST_TIME , sdf.format(new Date( lastMs)));
        hbJson.addProperty(JSON_SENDS, totalSends);
        hbJson.addProperty(JSON_RECVS, totalRecvs);
        hbJson.addProperty(JSON_DROPS, totalDrops);
        JsonArray cloudListJson = new JsonArray();
        hbJson.add(JSON_CLOUDS, cloudListJson);
        for( int i = 0; i < sends.length; ++i ) {
          JsonObject cloudJson = new JsonObject();
          cloudListJson.add(cloudJson);
          cloudJson.addProperty(JSON_CLOUD, TimeLine.CLOUD._memary[i].toString());
          cloudJson.addProperty(JSON_SENDS, sends[i]);
          cloudJson.addProperty(JSON_RECVS, recvs[i]);
        }
      }
      // Break down time into something readable
      long ms = event.ms(); // Event happened msec
      long ns = event.ns(); // Event happened nanosec
      String date = sdf.format(new Date(ms));
      JsonObject eventJson = new JsonObject();
      eventsJson.add(eventJson);
      eventJson.addProperty(JSON_UDPTCP, event.ioflavor());
      eventJson.addProperty(JSON_TIME, date);
      eventJson.addProperty(JSON_NANOS, ns);
      eventJson.addProperty(JSON_TYPE, e.toString());
      eventJson.addProperty(JSON_SR, event.isSend());
      if( event.isSend() ) {
        eventJson.addProperty(JSON_SEND, h2o.toString());
        // A null packed target means the packet was multicast to the cloud.
        String recv = event.packH2O() == null ? "multicast" : event.packH2O().toString();
        eventJson.addProperty(JSON_RECV, recv);
      } else {
        eventJson.addProperty(JSON_SEND, event.packH2O().toString());
        eventJson.addProperty(JSON_RECV, h2o.toString());
        if( event.isDropped() ) eventJson.addProperty(JSON_DROP, "1");
      }
      eventJson.addProperty(JSON_BYTES, UDP.printx16(l0,h8));
    }
    // NOTE(review): heartbeats accumulated after the last non-heartbeat event
    // appear not to be flushed — confirm whether trailing heartbeats should show.
    Response r = Response.done(resJson);
    r.setBuilder(JSON_EVENTS, new EventTableBuilder());
    return r;
  }

  /** Renders the events array as an HTML table, picking a row builder per event type. */
  private static class EventTableBuilder extends ArrayBuilder {
    @Override public String header(JsonArray array) {
      return "<table class='table table-striped table-bordered'>\n<thead>"
          + "<th>hh:mm:ss:ms</th>"
          + "<th>nanosec</th>"
          + "<th>who</th>"
          + "<th>I/O Kind</th>"
          + "<th>event</th>"
          + "<th>bytes</th>"
          + "</thead>";
    }
    @Override public Builder defaultBuilder(JsonElement element) {
      JsonObject obj = (JsonObject)element;
      // Heartbeat summaries get their own condensed row format.
      if( obj.get(JSON_TYPE).getAsString().equals("heartbeat") )
        return new HeartbeatEventRowBuilder();
      return new BasicEventRowBuild();
    }
  }

  /** Row builder for the collapsed heartbeat-run summary entries. */
  private static class HeartbeatEventRowBuilder extends ArrayRowBuilder {
    @Override public String build(Response response, JsonObject object, String contextName) {
      String name = elementName(contextName);
      StringBuilder sb = new StringBuilder();
      sb.append(caption(object, name));
      sb.append(header(object, name));
      sb.append("<td>").append(object.get(JSON_LAST_TIME).getAsString()).append("</td>");
      sb.append("<td>lots</td>");
      sb.append("<td>many -> many</td>");
      sb.append("<td>UDP</td>");
      sb.append("<td>heartbeat</td>");
      sb.append("<td>");
      sb.append(object.get(JSON_SENDS).getAsLong()).append(" sends, ");
      sb.append(object.get(JSON_RECVS).getAsLong()).append(" recvs, ");
      sb.append(object.get(JSON_DROPS).getAsLong()).append(" drops");
      sb.append("</td>");
      sb.append(footer(object, name));
      return sb.toString();
    }
  }

  /** Row builder for ordinary (non-heartbeat) events; dropped packets are tinted pink. */
  private static class BasicEventRowBuild extends ArrayRowBuilder {
    @Override public String build(Response response, JsonObject object, String contextName) {
      StringBuilder sb = new StringBuilder();
      if( object.get(JSON_DROP) == null ) sb.append("<tr>");
      else sb.append("<tr style='background-color:Pink'>");
      sb.append("<td>").append(object.get(JSON_TIME).getAsString()).append("</td>");
      sb.append("<td>").append(object.get(JSON_NANOS).getAsLong()).append("</td>");
      boolean isSend = object.get(JSON_SR).getAsBoolean();
      String s = object.get(JSON_SEND).getAsString();
      String r = object.get(JSON_RECV).getAsString();
      // Bold the local end of the transfer (sender on sends, receiver on recvs).
      sb.append("<td>");
      if( isSend ) sb.append("<b>").append(s).append("</b>");
      else sb.append(s);
      sb.append(" -> ");
      if( !isSend ) sb.append("<b>").append(r).append("</b>");
      else sb.append(r);
      sb.append("</td>");
      sb.append("<td>").append(object.get(JSON_UDPTCP ).getAsString()).append("</td>");
      sb.append("<td>").append(object.get(JSON_TYPE ).getAsString()).append("</td>");
      sb.append("<td>").append(object.get(JSON_BYTES).getAsString()).append("</td>");
      sb.append("</tr>");
      return sb.toString();
    }
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/ToEnum2.java
package water.api; import water.Request2; import water.fvec.Frame; import water.fvec.Vec; import water.util.Log; public class ToEnum2 extends Request2 { @Override public RequestServer.API_VERSION[] supportedVersions() { return SUPPORTS_ONLY_V2; } @Override protected void registered(RequestServer.API_VERSION version) { super.registered(version); } @API(help="An existing H2O Frame key.", required=true, filter=Default.class) public Frame src_key; @API(help="The column index to perform the factorization on.", required = true, filter=Default.class) public int column_index; @Override protected Response serve() { try { if (column_index <= 0) throw new IllegalArgumentException("Column index is 1 based. Please supply a valid column index in the range [1,"+ src_key.numCols()+"]"); Log.info("Factorizing column " + column_index); Vec nv = src_key.vecs()[column_index - 1].toEnum(); src_key.replace(column_index - 1, nv); } catch( Throwable e ) { return Response.error(e); } return Inspect2.redirect(this, src_key._key.toString()); } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/ToInt.java
//package water.api; // // //import water.util.Log; // //public class ToInt extends Request { // protected final H2OExistingKey _key = new H2OExistingKey(KEY); // protected final Int _col_index = new Int(COL_INDEX, -1); // // @Override // protected Response serve() { // try { // Log.info("Integerizing column " + column_index); // // // } catch( Throwable e ) { // return Response.error(e); // } // return // } //}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/ToInt2.java
package water.api; import water.Request2; import water.fvec.Frame; import water.fvec.Vec; import water.util.Log; public class ToInt2 extends Request2 { @Override public RequestServer.API_VERSION[] supportedVersions() { return SUPPORTS_ONLY_V2; } @Override protected void registered(RequestServer.API_VERSION version) { super.registered(version); } @API(help="An existing H2O Frame key.", required=true, filter=Default.class) public Frame src_key; @API(help="The column index to perform the factorization on.", required = true, filter=Default.class) public int column_index; @Override protected Response serve() { try { if (column_index <= 0) throw new IllegalArgumentException("Column index is 1 based. Please supply a valid column index in the range [1,"+ src_key.numCols()+"]"); Log.info("Integerizing column " + column_index); assert src_key.vecs()[column_index - 1].masterVec().isInt(); Vec nv = src_key.vecs()[column_index - 1].masterVec(); src_key.replace(column_index - 1, nv); } catch( Throwable e ) { return Response.error(e); } return Inspect2.redirect(this, src_key._key.toString()); } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/TutorialDeepLearning.java
package water.api; import hex.deeplearning.DeepLearning; /** * Tutorial about deep learning. * @see DeepLearning */ public class TutorialDeepLearning extends TutorialWorkflow { private final transient TutorWorkflow _wf; private final static String[][] TUTORIAL_STEPS = new String[][]{ /* Title Short Summary File containing step description */ new String[] { "Step 1", "Introduction" , "/tutorials/deeplearning/step1.html" }, new String[] { "Step 2", "Dataset inhale", "/tutorials/deeplearning/step2.html" }, new String[] { "Step 3", "Parsing the dataset", "/tutorials/deeplearning/step3.html" }, new String[] { "Step 4", "Inspecting the dataset", "/tutorials/deeplearning/step4.html" }, new String[] { "Step 5", "Building the model", "/tutorials/deeplearning/step5.html" }, new String[] { "Step 6", "Inspecting the model", "/tutorials/deeplearning/step6.html" }, new String[] { "Step 7", "Predicting on a test set", "/tutorials/deeplearning/step7.html" }, new String[] { "Step 8", "Score the prediction", "/tutorials/deeplearning/step8.html" }, }; public TutorialDeepLearning() { _wf = new TutorWorkflow("Deep Learning Tutorial"); int i = 1; for (String[] info : TUTORIAL_STEPS) { _wf.addStep(i++, new FileTutorStep(info)); } } @Override protected TutorWorkflow getWorkflow() { return _wf; } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/TutorialGBM.java
package water.api; /** * Basic page introducing tutorial for GBM on Iris */ public class TutorialGBM extends TutorialWorkflow { private final transient TutorWorkflow _wf; private final static String[][] TUTORIAL_STEPS = new String[][]{ /* Title Short Summary File containing step description */ new String[] { "Step 1", "Introduction", "/tutorials/gbm.iris/step1.html" }, new String[] { "Step 2", "Dataset inhale", "/tutorials/gbm.iris/step2.html" }, new String[] { "Step 3", "Parsing the dataset", "/tutorials/gbm.iris/step3.html" }, new String[] { "Step 4", "Inspecting the dataset", "/tutorials/gbm.iris/step4.html" }, new String[] { "Step 5", "Building the model", "/tutorials/gbm.iris/step5.html" }, new String[] { "Step 6", "Inspecting the model", "/tutorials/gbm.iris/step6.html" }, new String[] { "Step 7", "Predict on a test set", "/tutorials/gbm.iris/step7.html" }, new String[] { "Step 8", "Scoring the prediction", "/tutorials/gbm.iris/step8.html" }, }; public TutorialGBM() { _wf = new TutorWorkflow("GBM Tutorial"); int i = 1; for (String[] info : TUTORIAL_STEPS) { _wf.addStep(i++, new FileTutorStep(info)); } } @Override protected TutorWorkflow getWorkflow() { return _wf; } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/TutorialGLMProstate.java
/** * */ package water.api; /** * Basic page introducing tutorial for GLM on prostate dataset. * * @author michal * */ public class TutorialGLMProstate extends TutorialWorkflow { private final transient TutorWorkflow _wf; private final static String[][] TUTORIAL_STEPS = new String[][]{ /* Title Short Summary File containing step description */ new String[] { "Step 1", "Introduction", "/tutorials/glm.prostate/step1.html" }, new String[] { "Step 2", "Dataset inhale", "/tutorials/glm.prostate/step2.html" }, new String[] { "Step 3", "Parsing the dataset", "/tutorials/glm.prostate/step3.html" }, new String[] { "Step 4", "Inspecting the dataset", "/tutorials/glm.prostate/step4.html" }, new String[] { "Step 5", "Building the model", "/tutorials/glm.prostate/step5.html" }, new String[] { "Step 6", "Inspecting the model", "/tutorials/glm.prostate/step6.html" }, new String[] { "Step 7", "Predict on a test set", "/tutorials/glm.prostate/step7.html" }, new String[] { "Step 8", "Scoring the prediction", "/tutorials/glm.prostate/step8.html" } }; public TutorialGLMProstate() { _wf = new TutorWorkflow("GLM Tutorial"); int i = 1; for (String[] info : TUTORIAL_STEPS) { _wf.addStep(i++, new FileTutorStep(info)); } } @Override protected TutorWorkflow getWorkflow() { return _wf; } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/TutorialKMeans.java
package water.api;

/**
 * Basic page introducing the K-means clustering tutorial.
 * (Original javadoc said "GLM on prostate dataset" — a copy-paste error;
 * the workflow title and step paths below are K-means.)
 *
 * @author michal
 */
public class TutorialKMeans extends TutorialWorkflow {
  private final transient TutorWorkflow _wf;

  /* Title, short summary, file containing step description */
  private final static String[][] TUTORIAL_STEPS = new String[][]{
    new String[] { "Step 1", "Introduction",            "/tutorials/kmeans/step1.html" },
    new String[] { "Step 2", "Dataset inhale & parse",  "/tutorials/kmeans/step2.html" },
    new String[] { "Step 3", "Running the algorithm",   "/tutorials/kmeans/step3.html" },
  };

  public TutorialKMeans() {
    _wf = new TutorWorkflow("K-means");
    // Steps are numbered starting at 1.
    int i = 1;
    for (String[] info : TUTORIAL_STEPS) {
      _wf.addStep(i++, new FileTutorStep(info));
    }
  }

  @Override protected TutorWorkflow getWorkflow() { return _wf; }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/TutorialRFIris.java
package water.api; /** * Basic page introducing tutorial for Random Forest on Iris * * @author michal */ public class TutorialRFIris extends TutorialWorkflow { private final transient TutorWorkflow _wf; private final static String[][] TUTORIAL_STEPS = new String[][]{ /* Title Short Summary File containing step description */ new String[] { "Step 1", "Introduction", "/tutorials/rf.iris/step1.html" }, new String[] { "Step 2", "Dataset inhale", "/tutorials/rf.iris/step2.html" }, new String[] { "Step 3", "Parsing the dataset", "/tutorials/rf.iris/step3.html" }, new String[] { "Step 4", "Inspecting the dataset", "/tutorials/rf.iris/step4.html" }, new String[] { "Step 5", "Building the model", "/tutorials/rf.iris/step5.html" }, new String[] { "Step 6", "Inspecting the model", "/tutorials/rf.iris/step6.html" }, new String[] { "Step 7", "Predict on a test set", "/tutorials/rf.iris/step7.html" }, new String[] { "Step 8", "Scoring the prediction", "/tutorials/rf.iris/step8.html" }, }; public TutorialRFIris() { _wf = new TutorWorkflow("Random Forest Tutorial"); int i = 1; for (String[] info : TUTORIAL_STEPS) { _wf.addStep(i++, new FileTutorStep(info)); } } @Override protected TutorWorkflow getWorkflow() { return _wf; } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/TutorialWorkflow.java
package water.api;

import java.io.*;
import java.util.ArrayList;
import java.util.Iterator;

import water.Boot;
import water.util.Log;

/**
 * Abstract base page rendering a multi-step tutorial workflow as HTML: a left
 * column listing all steps, a right column with the active step's content, and
 * a previous/next pager.  Subclasses supply the concrete workflow.
 * (Original javadoc said "Random Forest on Iris" — that belongs to a subclass.)
 *
 * @author michal
 */
abstract public class TutorialWorkflow extends HTMLOnlyRequest {
  // 1-based index of the currently displayed step, taken from the ?step= query arg.
  protected final Int _step = new Int(STEP, 1);

  /** Returns a workflow to show */
  protected abstract TutorWorkflow getWorkflow();

  @Override protected String build(Response response) {
    StringBuilder sb = new StringBuilder();
    sb.append("<script type='text/javascript' src='tutorials/js/basic.js'></script>");
    decorateWorkflow(getWorkflow(), sb, _step.value());
    return sb.toString();
  }

  /** Shows the active workflow step */
  protected void decorateActiveStep(final TutorStep step, StringBuilder sb) {
    sb.append("<h4>").append(step.summary()).append("</h4>");
    sb.append(step.content());
  }

  /** Renders the whole page layout: header, step list, active step, and pager. */
  protected void decorateWorkflow(final TutorWorkflow twf, StringBuilder sb, int activeStepNum) {
    int len = twf.length();
    TutorStep activeStep = twf.getStep(activeStepNum);
    // Format tutorial header
    sb.append("<div class='container' style='margin: 0px auto'>");
    sb.append("<h2>").append(twf.title()).append("</h2>");
    sb.append("<blockquote><p>").append(activeStep.summary()).append("</p>");
    sb.append("<small>Step ").append(activeStepNum).append(" of ").append(len).append("</small>");
    sb.append("</blockquote>");
    // Container for left, right columns
    sb.append("<div class='row'>" );
    // Append left column with list of tutorial steps
    sb.append("<div class='span3'>");
    sb.append("<table class='table table-stripped'>");
    for (TutorStep ts : twf) {
      sb.append("<tr>");
      // Highlight the active step's label; others link to their step page.
      sb.append("<td><span class='label ").append(activeStepNum == ts.ord ? "label-info" : "" ).append("'>").append(ts.title()).append("</span></td>");
      sb.append("<td>");
      if (activeStepNum == ts.ord) {
        sb.append("<strong>").append(ts.summary()).append("</strong>");
      } else {
        sb.append("<a href='").append(getStepUrl(ts.ord)).append("'>");
        sb.append(ts.summary());
        sb.append("</a>");
      }
      sb.append("</td>");
      sb.append("</tr>");
    }
    sb.append("</table>");
    sb.append("</div>"); // Close container for left column
    // Append right column with tutorial step description
    sb.append("<div class='span7 hero-unit'>");
    decorateActiveStep(activeStep, sb);
    sb.append("</div>"); // Close container for right column
    sb.append("</div>"); // Close top-level row for left and right columns
    // Pager in the bottom of left/right column
    sb.append("<div class='row'>");
    sb.append("<div class='span3'>&nbsp;</div>");
    sb.append("<div class='span7'>");
    sb.append("<ul class='pager'>");
    String next = getStepUrl(activeStepNum+1);
    String prev = getStepUrl(activeStepNum-1);
    // Disable the pager links at the first/last step.
    sb.append(activeStepNum > 1 ? "<li><a href='"+prev+"'>Previous</a></li>" : "<li class='disabled'><a href='#'>Previous</a></li>");
    sb.append(activeStepNum < len ? "<li><a href='"+next+"'>Next</a></li>" : "<li class='disabled'><a href='#'>Next</a></li>");
    sb.append("</ul>");
    sb.append("</div>");
    sb.append("</div>");
    // Close top-level container
    sb.append("</div>");
  }

  /** URL of the page showing the given step of THIS tutorial subclass. */
  String getStepUrl(int step) {
    return this.getClass().getSimpleName() + ".html?step=" + step;
  }

  /** A simple tutorial workflow representation */
  protected class TutorWorkflow implements Iterable<TutorStep> {
    private final ArrayList<TutorStep> _steps = new ArrayList<TutorialWorkflow.TutorStep>();
    private final String _title;

    public TutorWorkflow(String title) { _title = title; }

    /** Add a new step into tutorial workflow */
    public void addStep(int num, TutorStep step) {
      _steps.add(step);
      step.ord = num;
    }

    /** Get tutorial step. Step parameter is 1-based. */
    public TutorStep getStep(int step) { return _steps.get(step-1); }

    @Override public Iterator<TutorStep> iterator() { return _steps.iterator(); }
    public final int length() { return _steps.size(); }
    public final String title() { return _title; }
  }

  /** Simple tutorial step defined by its title, summary, and content. */
  protected abstract class TutorStep {
    // 1-based position of this step within its workflow, set by addStep().
    int ord;
    /* Array storing step name, title, and content */
    protected final String[] _info;

    public final String title() { return _info[0]; }
    public final String summary() { return _info[1]; }

    /* Override this method to provide the content */
    public abstract String content();

    public TutorStep(final String[] info) {
      assert info.length >= 2;
      _info = info;
    }
  }

  /** Tutorial step stored in file */
  protected class FileTutorStep extends TutorStep {
    // Lazily loaded file content; guarded by synchronized content().
    private String _content;

    public FileTutorStep(String[] info) { super(info); assert info.length == 3; }

    @Override synchronized public final String content() {
      if (_content == null) _content = loadContent(_info[2]);
      return _content;
    }

    /** Reads the bundled resource into a string; returns partial/empty text on I/O error. */
    private String loadContent(String fromFile) {
      BufferedReader reader = null;
      StringBuilder sb = new StringBuilder();
      try {
        InputStream is = Boot._init.getResource2(fromFile);
        assert is != null : "Bundled resource " + fromFile + " does not exist!";
        reader = new BufferedReader(new InputStreamReader(is));
        String line = null;
        while( (line = reader.readLine())!=null) sb.append(line).append('\n');
      } catch( IOException e ) { /* Silently ignoring */ Log.err(e); } finally {
        if (reader!=null) try { reader.close(); } catch( IOException e ) { throw new RuntimeException(Log.err("IOException during reader close.",e)); }
      }
      return sb.toString();
    }
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/Tutorials.java
package water.api; import water.AbstractBuildVersion; import water.H2O; /** * Summary page referencing all tutorials. * * @author michal */ public class Tutorials extends HTMLOnlyRequest { @Override protected String build(Response response) { AbstractBuildVersion abv = H2O.getBuildVersion(); String branchName = abv.branchName(); String buildNumber = abv.buildNumber(); String documentationUrl = "http://s3.amazonaws.com/h2o-release/h2o/" + branchName + "/" + buildNumber + "/docs-website"; String RPackageDocumentationUrl = documentationUrl + "/Ruser/top.html"; return "<div class='container'>" + "<div>" + "<h1>Get started with H<sub>2</sub>O tutorials!</h1>" + "<p class='text-center'>" + "<a href='http://h2oworld.eventbrite.com/' class='btn btn-large btn-warning' type='button'>Reserve your spot!</a>" + "</p>" + "<p class='text-center'>" + "<img src='img/banners/h2o_world_banner.png' /> " + "</p>" + "</div>" + "<div class='row'>" + "<div class='span2 col'>" + "<h2>Use H<sub>2</sub>O from R</h2>" + "<div style='background-color:#006dcc;color:white;background-image:linear-gradient(to bottom,#08c,#04c);text-align:center;font-size:70px;font-weight:bold;height:100px;line-height:100px;border-radius:15px;max-width:110px;margin-bottom:5px'>R</div>" + "<p>H<sub>2</sub>O supports both R and R Studio.</p>" + "<a href='" + RPackageDocumentationUrl + "' class='btn btn-primary'>Try it!</a>" + "</div>" + "<div class='span2 col'>" + " <h2>Random Forest</h2>" + "<p>Random Forest is a classical machine learning method for classification and regression. 
Learn how to use it with H<sub>2</sub>O for better predictions.</it></p>" + "<a href='/TutorialRFIris.html' class='btn btn-primary'>Try it!</a>" + "</div>" + "<div class='span2 col'>" + " <h2>GBM</h2>" + "<p>GBM uses gradient boosted trees for classification and regression, and is one of the most powerful machine learning methods in H<sub>2</sub>O.</p>" + "<a href='/TutorialGBM.html' class='btn btn-primary'>Try it!</a>" + "</div>" + "<div class='span2 col'>" + "<h2>GLM</h2>" + "<p>Generalized linear model is a generalization of linear regression. Experience its unique power and blazing speed on top of H<sub>2</sub>O.</p>" + "<a href='/TutorialGLMProstate.html' class='btn btn-primary'>Try it!</a>" + "</div>" + "<div class='span2 col'>" + "<h2>K-Means</h2>" + "<p>Perform clustering analysis with H<sub>2</sub>O. K-means is a highly scalable clustering algorithm for unsupervised learning on big data.</p>" + "<a href='/TutorialKMeans.html' class='btn btn-primary'>Try it!</a>" + "</div>" + "<div class='span2 col'>" + "<h2>Deep Learning</h2>" + "<p>H<sub>2</sub>O's distributed Deep Learning gives you the power of deep neural networks for highest accuracy for classification and regression.</p>" + "<a href='/TutorialDeepLearning.html' class='btn btn-primary'>Try it!</a>" + "</div>" + "</div>" + "</div>"; } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/TypeaheadFileRequest.java
package water.api;

import java.io.File;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;

import water.persist.PersistHdfs;
import water.persist.PersistS3;

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.Bucket;
import com.google.common.base.Strings;

import dontweave.gson.JsonArray;
import dontweave.gson.JsonPrimitive;

/**
 * Typeahead endpoint for file paths.  Dispatches on the typed prefix to local
 * disk, HDFS, or S3 and returns up to {@code limit} matching paths.
 */
public class TypeaheadFileRequest extends TypeaheadRequest {
  public TypeaheadFileRequest() {
    super("Provides a simple JSON array of filtered local files.","");
  }

  /** Suggests local files: lists the typed directory (or the parent of a partial name). */
  protected JsonArray serveFile(String filter, int limit){
    // Split the filter into a directory to list and a lower-cased name prefix.
    File dir = null;
    String prefix = "";
    if( !filter.isEmpty() ) {
      File typed = new File(filter);
      if( typed.isDirectory() ) {
        dir = typed;
      } else {
        dir = typed.getParentFile();
        prefix = typed.getName().toLowerCase();
      }
    }
    if( dir == null ) dir = new File(".");
    JsonArray matches = new JsonArray();
    File[] entries = dir.listFiles();
    if( entries == null ) return matches;
    for( File entry : entries ) {
      if( entry.isHidden() ) continue;
      if( entry.getName().toLowerCase().startsWith(prefix) )
        matches.add(new JsonPrimitive(entry.getPath()));
      if( matches.size() == limit) break;
    }
    return matches;
  }

  /** Suggests HDFS paths extending the typed prefix; best-effort on any failure. */
  protected JsonArray serveHdfs(String filter, int limit){
    JsonArray matches = new JsonArray();
    Configuration conf = PersistHdfs.CONF;
    if( conf == null ) return matches;   // HDFS not configured
    try {
      Path typed = new Path(filter);
      // Trailing '/' means "list inside this dir"; otherwise list the parent.
      Path dir = filter.endsWith("/") ? typed : typed.getParent();
      FileSystem fs = FileSystem.get(typed.toUri(), conf);
      for( FileStatus entry : fs.listStatus(dir) ) {
        String path = entry.getPath().toString();
        if( path.startsWith(typed.toString()) ) {
          matches.add(new JsonPrimitive(path));
        }
        if( matches.size() == limit) break;
      }
    } catch( Throwable xe ) { /* best-effort: any HDFS failure yields no suggestions */ }
    return matches;
  }

  /** Suggests S3 bucket names starting with the typed prefix. */
  protected JsonArray serveS3(String filter, int limit){
    JsonArray matches = new JsonArray();
    try {
      AmazonS3 s3 = PersistS3.getClient();
      String prefix = Strings.nullToEmpty(filter);
      for( Bucket bucket : s3.listBuckets() ) {
        if( bucket.getName().startsWith(prefix) )
          matches.add(new JsonPrimitive(bucket.getName()));
        if( matches.size() == limit) break;
      }
    } catch( IllegalArgumentException xe ) { /* missing/invalid S3 credentials */ }
    return matches;
  }

  @Override final protected JsonArray serve(String filter, int limit) {
    String lc = filter.toLowerCase();
    if( lc.startsWith("hdfs://") || lc.startsWith("s3n://") ) return serveHdfs(filter, limit);
    if( lc.startsWith("s3://") ) return serveS3(filter.substring(5), limit);
    return serveFile(filter,limit);
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/TypeaheadHdfsPathRequest.java
package water.api; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.*; import water.persist.PersistHdfs; import dontweave.gson.JsonArray; import dontweave.gson.JsonPrimitive; public class TypeaheadHdfsPathRequest extends TypeaheadRequest { public TypeaheadHdfsPathRequest() { super("Provides a simple JSON array of HDFS Buckets.",""); } @Override protected JsonArray serve(String filter, int limit) { JsonArray array = new JsonArray(); Configuration conf = PersistHdfs.CONF; if( conf == null ) return array; try { Path p = new Path(filter); Path expand = p; if( !filter.endsWith("/") ) expand = p.getParent(); FileSystem fs = FileSystem.get(p.toUri(), conf); for( FileStatus file : fs.listStatus(expand) ) { Path fp = file.getPath(); if( fp.toString().startsWith(p.toString()) ) { array.add(new JsonPrimitive(fp.toString())); } if( array.size() == limit) break; } } catch( Throwable xe ) { } return array; } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/TypeaheadKeysRequest.java
package water.api; import hex.nb.NBModel; import hex.pca.PCAModel; import hex.*; import water.*; import water.fvec.Frame; import dontweave.gson.JsonArray; import dontweave.gson.JsonPrimitive; public class TypeaheadKeysRequest extends TypeaheadRequest { final String _cname; int _typeid; // Also filter for Keys of this type public TypeaheadKeysRequest(String msg, String filter, Class C) { super(msg, filter); _cname = C == null ? null : C.getName(); } @Override protected JsonArray serve(String filter, int limit) { return serve(filter, limit, 2000); } protected JsonArray serve(String filter, int limit, long timetolerance) { JsonArray array = new JsonArray(); int len = 0; // Gather some keys that pass all filters for( H2O.KeyInfo kinfo : H2O.KeySnapshot.globalSnapshot(2000)._keyInfos) { if( filter != null && // Have a filter? kinfo._key.toString().indexOf(filter) == -1 ) continue; // Ignore this filtered-out key if( !matchesType(kinfo) ) continue; // Wrong type? if( !shouldIncludeKey(kinfo) ) continue; // Generic override array.add(new JsonPrimitive(kinfo._key.toString())); if(array.size() == limit)break; } return array; } protected boolean matchesType(H2O.KeyInfo ki) { // No type filtering if( _typeid == 0 && _cname == null ) return true; // One-shot monotonic racey update from 0 to the known fixed typeid. // Since all writers write the same typeid, there is no race. if( _typeid == 0 ) _typeid = TypeMap.onIce(_cname); if( ki._type == _typeid ) return true; // Class Model is abstract, and TypeMap clazz() does not handle that well. // Also, want to allow both OldModel & Model. // Hack: check for water.Model and name the class directly. 
Class kclz = TypeMap.clazz(ki._type); if( TypeMap.className(_typeid).equals("water.Model") ) return Model.class.isAssignableFrom(kclz); return TypeMap.clazz(_typeid).isAssignableFrom(kclz); } // By default, all keys passing filters protected boolean shouldIncludeKey(H2O.KeyInfo k) { return true; } } class TypeaheadModelKeyRequest extends TypeaheadKeysRequest { public TypeaheadModelKeyRequest() { super("Provides a simple JSON array of filtered keys known to the "+ "current node that are Models at the time of calling.", null,Model.class); } } class TypeaheadPCAModelKeyRequest extends TypeaheadKeysRequest { public TypeaheadPCAModelKeyRequest() { super("Provides a simple JSON array of filtered keys known to the "+ "current node that are PCAModels at the time of calling.", null,PCAModel.class); } } class TypeaheadNBModelKeyRequest extends TypeaheadKeysRequest { public TypeaheadNBModelKeyRequest() { super("Provides a simple JSON array of filtered keys known to the "+ "current node that are NBModels at the time of calling.", null,NBModel.class); } } class TypeaheadHexKeyRequest extends TypeaheadKeysRequest { public TypeaheadHexKeyRequest() { super("Provides a simple JSON array of filtered keys known to the "+ "current node that are Frames at the time of calling.", null,Frame.class); } @Override protected boolean matchesType(H2O.KeyInfo kinfo) { return !kinfo._rawData && (kinfo._type == TypeMap.FRAME); } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/TypeaheadRequest.java
package water.api;

import water.api.RequestServer.API_VERSION;

import dontweave.gson.JsonArray;
import dontweave.gson.JsonObject;

/**
 * Base class for typeahead endpoints: accepts a substring filter and a result
 * limit, and returns a JSON array of matching items under the ITEMS field.
 */
public abstract class TypeaheadRequest extends Request {
  protected final Str _filter;  // substring the user has typed so far
  protected final Int _limit;   // max items to return; default 1024, bounded [0, 10240]

  public TypeaheadRequest(String help, String filter) {
    _requestHelp = help;
    _filter = new Str(FILTER,filter);
    _filter._requestHelp = "Only items matching this filter will be returned.";
    _limit = new Int(LIMIT,1024,0,10240);
    _limit._requestHelp = "Max number of items to be returned.";
  }

  /** Delegate to the subclass, then wrap the array in the standard { ITEMS: [...] } shape. */
  @Override final protected Response serve() {
    JsonArray array = serve(_filter.value(), _limit.value());
    JsonObject response = new JsonObject();
    response.add(ITEMS, array);
    return Response.done(response);
  }

  /** Typeahead queries fire on every keystroke, so keep them out of the request log. */
  @Override protected boolean log() { return false; }

  /** Subclasses produce the array of items matching the filter, at most {@code limit} long. */
  abstract protected JsonArray serve(String filter, int limit);

  @Override public API_VERSION[] supportedVersions() { return SUPPORTS_V1_V2; }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/TypeaheadS3BucketRequest.java
package water.api;

import water.persist.PersistS3;

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.Bucket;
import com.google.common.base.Strings;
import dontweave.gson.JsonArray;
import dontweave.gson.JsonPrimitive;

/** Typeahead of S3 bucket names, prefix-matched against the user's filter. */
public class TypeaheadS3BucketRequest extends TypeaheadRequest {
  public TypeaheadS3BucketRequest() {
    super("Provides a simple JSON array of S3 paths.","");
  }

  @Override protected JsonArray serve(String filter, int limit) {
    JsonArray array = new JsonArray();
    try {
      AmazonS3 s3 = PersistS3.getClient();
      filter = Strings.nullToEmpty(filter); // null filter means "match everything"
      for( Bucket b : s3.listBuckets() ) {
        if( b.getName().startsWith(filter) )
          array.add(new JsonPrimitive(b.getName()));
        if( array.size() == limit) break;
      }
    } catch( IllegalArgumentException xe ) { }
    // NOTE(review): the empty catch above makes this best-effort — presumably it
    // covers "S3 credentials not configured" thrown by getClient(), returning an
    // empty list instead of failing the typeahead. Confirm; consider logging.
    return array;
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/UnlockKeys.java
package water.api;

import water.*;
import water.util.Log;

/**
 * Admin endpoint: force-unlock every Lockable key on every node in the cloud.
 * Refuses (by throwing) if any locking Job is still running, since unlocking
 * under a live writer could corrupt its in-flight update.
 */
public class UnlockKeys extends Request2 {
  @Override public Response serve() {
    try {
      Log.info("Unlocking all locked keys on the cluster.");
      new UnlockTask().invokeOnAllNodes();
    } catch( Throwable e ) {
      return Response.error(e);
    }
    return Response.done(this);
  }

  /** Per-node task: walk the local key snapshot and unlock each locked Lockable. */
  public class UnlockTask extends DRemoteTask<UnlockTask> {
    @Override public void reduce(UnlockTask drt) { } // nothing to combine — side effects only
    @Override public byte priority() { return H2O.GUI_PRIORITY; }
    @Override public void lcompute() {
      final H2O.KeyInfo[] kinfo = H2O.KeySnapshot.localSnapshot(true)._keyInfos;
      for(H2O.KeyInfo k:kinfo) {
        if(!k.isLockable()) continue;
        final Value val = DKV.get(k._key);
        if( val == null ) continue;
        final Object obj = val.rawPOJO();
        if( obj == null ) continue; //need to have a POJO to be locked
        final Lockable<?> lockable = (Lockable<?>)(obj);
        final Key[] lockers = ((Lockable) obj)._lockers;
        if (lockers != null) {
          // check that none of the locking jobs is still running
          for (Key locker : lockers) {
            if (locker != null && locker.type() == Key.JOB) {
              final Job job = UKV.get(locker);
              if (job != null && job.isRunning())
                throw new UnsupportedOperationException("Cannot unlock all keys since locking jobs are still running.");
            }
          }
          lockable.unlock_all();
          Log.info("Unlocked key '" + k._key + "' from " + lockers.length + " lockers.");
        }
      }
      Log.info("All keys are now unlocked.");
      tryComplete();
    }
  }

  @Override public boolean toHTML(StringBuilder sb) {
    DocGen.HTML.paragraph(sb, "All keys are now unlocked.");
    return true;
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/Up.java
package water.api;

import dontweave.gson.JsonObject;
import water.util.Log;

/**
 * Liveness ping: answers with an empty JSON object so callers can tell this
 * node is up and serving requests.
 */
public class Up extends Request {
  @Override public RequestServer.API_VERSION[] supportedVersions() { return SUPPORTS_V1_V2; }

  @Override protected Response serve() {
    // Nothing to report beyond success — an empty payload is the whole answer.
    return Response.done(new JsonObject());
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/Upload.java
package water.api;

import dontweave.gson.JsonObject;

/**
 * Serves the HTML upload page (API v1): wires up the jQuery-File-Upload widget,
 * whose JavaScript POSTs the chosen file to the PostFile endpoint below.
 */
public class Upload extends HTMLOnlyRequest {
  @Override protected String build(Response response) {
    return ""
    + "<script type='text/javascript' src='jquery.fileupload/js/api_v1.js'></script>"
    + "<script type='text/javascript' src='jquery.fileupload/js/vendor/jquery.ui.widget.js'></script>"
    + "<script type='text/javascript' src='jquery.fileupload/js/jquery.iframe-transport.js'></script>"
    + "<script type='text/javascript' src='jquery.fileupload/js/jquery.fileupload.js'></script>"
    + "<script type='text/javascript' src='jquery.fileupload/js/main.js'></script>"
    + "<div class='container' style='margin: 0px auto'>"
    + "<h3>Request Upload <a href='Upload.help'><i class='icon-question-sign'></i></a></h3>"
    + "<p>Please specify the file to be uploaded.</p>"
    + "<form id='Fileupload'>"
    + " <span class='btn but-success fileinput-button'>"
    + " <i class='icon-plus icon-white'></i>"
    + " <span>Select file...</span>"
    + " <input type='file'>"
    + " </span>"
    + "</form>"
    + "<table class='table' style='border:0px' id='UploadTable'>"
    + "</table>"
    + "</div>";
  }

  // Here is an example of how to upload a file from the command line.
  //
  // curl -v -F "file=@allyears2k_headers.zip" "http://localhost:54321/PostFile.json?key=a.zip"
  //
  // This call is handled as a POST request in method NanoHTTPD#fileUpload
  public static class PostFile extends JSONOnlyRequest {
    // dummy parameter — the actual file body is consumed by NanoHTTPD's upload path
    H2OKey key = new H2OKey(KEY,true);
    @Override protected Response serve() { return Response.done(new JsonObject()); }
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/Upload2.java
package water.api;

import dontweave.gson.JsonObject;

/**
 * Serves the HTML upload page (API v2).  Identical to {@link Upload} except it
 * loads the v2 upload script and its PostFile endpoint is v2-only.
 */
public class Upload2 extends HTMLOnlyRequest {
  @Override protected String build(Response response) {
    return ""
    + "<script type='text/javascript' src='jquery.fileupload/js/api_v2.js'></script>"
    + "<script type='text/javascript' src='jquery.fileupload/js/vendor/jquery.ui.widget.js'></script>"
    + "<script type='text/javascript' src='jquery.fileupload/js/jquery.iframe-transport.js'></script>"
    + "<script type='text/javascript' src='jquery.fileupload/js/jquery.fileupload.js'></script>"
    + "<script type='text/javascript' src='jquery.fileupload/js/main.js'></script>"
    + "<div class='container' style='margin: 0px auto'>"
    + "<h3>Request Upload <a href='Upload.help'><i class='icon-question-sign'></i></a></h3>"
    + "<p>Please specify the file to be uploaded.</p>"
    + "<form id='Fileupload'>"
    + " <span class='btn but-success fileinput-button'>"
    + " <i class='icon-plus icon-white'></i>"
    + " <span>Select file...</span>"
    + " <input type='file'>"
    + " </span>"
    + "</form>"
    + "<table class='table' style='border:0px' id='UploadTable'>"
    + "</table>"
    + "</div>";
  }

  // Here is an example of how to upload a file from the command line.
  //
  // curl -v -F "file=@allyears2k_headers.zip" "http://localhost:54321/2/PostFile.json?key=a.zip"
  //
  // This call is handled as a POST request in method NanoHTTPD#fileUpload
  public static class PostFile extends JSONOnlyRequest {
    @Override public RequestServer.API_VERSION[] supportedVersions() { return SUPPORTS_ONLY_V2; }

    /**
     * Iterates over fields and their annotations, and creates argument handlers.
     */
    @Override protected void registered(RequestServer.API_VERSION version) {
      super.registered(version);
    }

    // dummy parameter — the actual file body is consumed by NanoHTTPD's upload path
    H2OKey key = new H2OKey(KEY,true);
    @Override protected Response serve() { return Response.done(new JsonObject()); }
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/api/WaterMeterPerfbar.java
package water.api;

import dontweave.gson.*;
import water.*;
import water.util.LinuxProcFileReader;
import water.util.Log;

/**
 * Redirect to water meter page.  Serves the perfbar HTML shell; the nested
 * WaterMeterCpuTicks endpoint supplies the per-node CPU tick data it polls.
 */
public class WaterMeterPerfbar extends HTMLOnlyRequest {
  protected String build(Response response) {
    return ""
        + "<div class='container' id='perfbarContainer'>"
        + "<script>"
        + "var PB_LINEOFTEXT_BACKGROUND_COLOR = \"#fff\";"
        + "</script>"
        + "<script src=\"watermeter/perfbar.js\"></script>"
        + "</div>";
  }

  /** JSON endpoint returning raw /proc CPU tick counters for one cluster node. */
  public static class WaterMeterCpuTicks extends JSONOnlyRequest {
    Int node_idx = new Int("node_idx", -1); // which cloud member to query; validated in serve()

    @Override public RequestServer.API_VERSION[] supportedVersions() { return SUPPORTS_ONLY_V2; }

    /**
     * Iterates over fields and their annotations, and creates argument handlers.
     */
    @Override protected void registered(RequestServer.API_VERSION version) {
      super.registered(version);
    }

    /** Remote task that reads CPU ticks from /proc on whichever node executes it. */
    private static class GetTicksTask extends DTask<GetTicksTask> {
      private long[][] _cpuTicks; // one row per CPU; filled in by compute2()
      public GetTicksTask() { _cpuTicks = null; }
      @Override public void compute2() {
        LinuxProcFileReader lpfr = new LinuxProcFileReader();
        lpfr.read();
        if (lpfr.valid()) {
          _cpuTicks = lpfr.getCpuTicks();
        }
        else {
          // In the case where there isn't any tick information, the client receives a json
          // response object containing an array of length 0.
          //
          // e.g.
          // { cpuTicks: [] }
          _cpuTicks = new long[0][0];
        }
        tryComplete();
      }
      @Override public byte priority() { return H2O.MIN_HI_PRIORITY; }
    }

    @Override protected Response serve() {
      if ((node_idx.value() < 0) || (node_idx.value() >= H2O.CLOUD.size())) {
        throw new IllegalArgumentException("Illegal node_idx for this H2O cluster (must be from 0 to " + H2O.CLOUD.size() + ")");
      }
      H2ONode node = H2O.CLOUD._memary[node_idx.value()];
      GetTicksTask ppt = new GetTicksTask();
      Log.trace("GetTicksTask starting to node " + node_idx.value() + "...");
      // Synchronous RPC call to get ticks from remote (possibly this) node.
      new RPC<GetTicksTask>(node, ppt).call().get();
      Log.trace("GetTicksTask completed to node " + node_idx.value());
      long[][] cpuTicks = ppt._cpuTicks;
      // Stuff tick information into json response.
      JsonArray j = new JsonArray();
      for (long[] arr : cpuTicks) {
        JsonArray j2 = new JsonArray();
        // Four columns per CPU row, forwarded verbatim from LinuxProcFileReader.
        j2.add(new JsonPrimitive(arr[0]));
        j2.add(new JsonPrimitive(arr[1]));
        j2.add(new JsonPrimitive(arr[2]));
        j2.add(new JsonPrimitive(arr[3]));
        j.add(j2);
      }
      JsonObject o = new JsonObject();
      o.add("cpuTicks", j);
      return Response.done(o);
    }
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water/api
java-sources/ai/h2o/h2o-classic/2.8/water/api/anno/RESTCall.java
package water.api.anno;

import java.lang.annotation.*;

/**
 * Marks a type as a REST endpoint binding: where its schema lives, which URL
 * path it answers, and the HTTP method used to call it.  Retained at runtime
 * so the request dispatcher can discover it reflectively.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE})
@Documented
public @interface RESTCall {
  /** Schema location */
  String location();
  /** Endpoint */
  String path();
  /** Call method */
  String method() default "GET";
}
0
java-sources/ai/h2o/h2o-classic/2.8/water/api
java-sources/ai/h2o/h2o-classic/2.8/water/api/handlers/ModelBuildersMetadataHandlerV1.java
package water.api.handlers;

import water.H2O;
import water.api.Handler;
import water.schemas.ModelBuildersMetadataV1;
import java.util.ArrayList;
import java.util.List;

/** V1 handler exposing model-builder metadata; currently a skeleton returning empty/default schemas. */
public class ModelBuildersMetadataHandlerV1 extends Handler<ModelBuildersMetadataHandlerV1, ModelBuildersMetadataV1> {
  /** No builders are registered yet, so the listing is always empty. */
  public List<ModelBuildersMetadataV1> list() {
    List<ModelBuildersMetadataV1> metadata = new ArrayList<ModelBuildersMetadataV1>();
    return metadata;
  }

  /** Nothing to compute for this handler. */
  @Override public void compute2() { }

  /** A fresh, default-populated schema instance. */
  public ModelBuildersMetadataV1 show() {
    return new ModelBuildersMetadataV1();
  }

  /** Map an API version onto its schema class; only version 1 exists today. */
  @Override protected ModelBuildersMetadataV1 schema(int version) {
    if (version == 1) return new ModelBuildersMetadataV1();
    throw H2O.fail("Unknown schema version: " + version + " for handler class: " + this.getClass());
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/deploy/Cloud.java
package water.deploy;

import java.io.File;
import java.io.Serializable;
import java.util.*;

import water.*;
import water.H2O.FlatFileEntry;
import water.deploy.VM.Params;
import water.deploy.VM.Watchdog;
import water.util.Log;
import water.util.Utils;

/**
 * Deploys and starts a remote cluster.
 * <br>
 * Note: This class is intended for debug and experimentation purposes only, please refer to the
 * documentation to run an H2O cluster.
 */
public class Cloud {
  public final List<String> publicIPs = new ArrayList<String>();
  public final List<String> privateIPs = new ArrayList<String>();
  /** Includes for rsync to the master */
  public final Set<String> clientRSyncIncludes = new HashSet<String>();
  /** Excludes for rsync to the master */
  public final Set<String> clientRSyncExcludes = new HashSet<String>();
  /** Includes for rsync between the master and slaves */
  public final Set<String> fannedRSyncIncludes = new HashSet<String>();
  /** Excludes for rsync between the master and slaves */
  public final Set<String> fannedRSyncExcludes = new HashSet<String>();
  /** Port for all remote machines. */
  public static final int PORT = 54423;
  public static final int FORWARDED_LOCAL_PORT = 54321;
  /**
   * To avoid configuring remote machines, a JVM can be sent through rsync with H2O. By default,
   * decompress the Oracle Linux x64 JDK to a local folder and point this path to it.
   */
  static final String JRE = null; // System.getProperty("user.home") + "/libs/jdk/jre";
  /** Watch dogs are additional JVMs that shutdown the cluster when the client is killed */
  static final boolean WATCHDOGS = true;
  static final String FLATFILE = "flatfile";

  /**
   * Rsync the client payload to the first box (the master), then launch the Master entry point
   * there over ssh; the master in turn fans out to the remaining boxes.
   */
  public void start(String[] java_args, String[] args) {
    // Take first box as cloud master
    Host master = new Host(publicIPs.get(0));
    Set<String> incls = new HashSet<String>(clientRSyncIncludes);
    if( JRE != null && !new File(JRE + "/bin/java").exists() )
      throw new IllegalArgumentException("Invalid JRE");
    if( JRE != null ) incls.add(JRE);
    // Prefer private IPs for node-to-node traffic when they were provided
    List<String> ips = privateIPs.size() > 0 ? privateIPs : publicIPs;
    // Build the flatfile ("ip:port" per line) the nodes use to find each other
    String s = "";
    for( Object o : ips )
      s += (s.length() == 0 ? "" : '\n') + o.toString() + ":" + PORT;
    File flatfile = Utils.writeFile(new File(Utils.tmp(), FLATFILE), s);
    incls.add(flatfile.getAbsolutePath());
    master.rsync(incls, clientRSyncExcludes, false);
    ArrayList<String> list = new ArrayList<String>();
    list.add("-mainClass");
    list.add(Master.class.getName());
    CloudParams p = new CloudParams();
    p._incls = new HashSet<String>(fannedRSyncIncludes);
    p._excls = fannedRSyncExcludes;
    p._incls.add(FLATFILE);
    if( JRE != null ) p._incls.add(new File(JRE).getName());
    list.add(VM.write(p)); // serialized CloudParams travels as an argument
    list.addAll(Arrays.asList(args));
    String[] java = Utils.append(java_args, NodeVM.class.getName());
    Params params = new Params(master, java, list.toArray(new String[0]));
    if( WATCHDOGS ) {
      SSHWatchdog r = new SSHWatchdog(params);
      r.inheritIO();
      r.start();
    } else {
      try {
        SSHWatchdog.run(params);
      } catch( Exception e ) {
        throw new RuntimeException(e);
      }
    }
  }

  /** Serializable bag of rsync include/exclude sets shipped to the master. */
  static class CloudParams implements Serializable {
    Set<String> _incls, _excls;
  }

  /** Watchdog JVM that owns the ssh session to the master and dies with the client. */
  static class SSHWatchdog extends Watchdog {
    public SSHWatchdog(Params p) {
      super(javaArgs(SSHWatchdog.class.getName()), new String[] { write(p) });
    }

    public static void main(String[] args) throws Exception {
      exitWithParent();
      Params p = read(args[0]);
      run(p);
    }

    static void run(Params p) throws Exception {
      Host host = new Host(p._host[0], p._host[1], p._host[2]);
      String key = host.key() != null ? host.key() : "";
      // Build the full ssh command, with agent forwarding and local port forwarding
      String s = "ssh-agent sh -c \"ssh-add " + key + "; ssh -l " + host.user() + " -A" + Host.SSH_OPTS;
      s += " -L " + FORWARDED_LOCAL_PORT + ":127.0.0.1:" + PORT; // Port forwarding
      s += " " + host.address() + " '" + SSH.command(p._java, p._node) + "'\"";
      s = s.replace("\\", "\\\\").replace("$", "\\$");
      ArrayList<String> list = new ArrayList<String>();
      // Have to copy to file for cygwin, but works also on -nix
      File sh = Utils.writeFile(s);
      File onWindows = new File("C:/cygwin/bin/bash.exe");
      if( onWindows.exists() ) {
        list.add(onWindows.getPath());
        list.add("--login");
      } else list.add("bash");
      list.add(sh.getAbsolutePath());
      exec(list);
    }
  }

  /** Entry point run on the master box: rsyncs to workers, starts them, then joins the cloud itself. */
  public static class Master {
    public static void main(String[] args) throws Exception {
      VM.exitWithParent();
      CloudParams params = VM.read(args[0]);
      args = Utils.remove(args, 0);
      String[] workerArgs = new String[] { "-flatfile", FLATFILE, "-port", "" + PORT };
      List<FlatFileEntry> flatfile = H2O.parseFlatFile(new File(FLATFILE));
      HashMap<String, Host> hosts = new HashMap<String, Host>();
      ArrayList<Node> workers = new ArrayList<Node>();
      // Entry 0 is this master; entries 1..n are the worker boxes
      for( int i = 1; i < flatfile.size(); i++ ) {
        Host host = new Host(flatfile.get(i).inet.getHostAddress());
        hosts.put(host.address(), host);
        workers.add(new NodeHost(host, workerArgs));
      }
      Host.rsync(hosts.values().toArray(new Host[0]), params._incls, params._excls, false);
      for( Node w : workers ) {
        w.inheritIO();
        w.start();
      }
      H2O.main(Utils.append(workerArgs, args));
      stall_till_cloudsize(1 + workers.size(), 10000); // stall for cloud 10seconds
      Log.unwrap(System.out, "");
      Log.unwrap(System.out, "Cloud is up, local port " + FORWARDED_LOCAL_PORT + " forwarded");
      Log.unwrap(System.out, "Go to http://127.0.0.1:" + FORWARDED_LOCAL_PORT);
      Log.unwrap(System.out, "");
      // If the user supplied a -mainClass, weave its package and launch it on the cloud
      int index = Arrays.asList(args).indexOf("-mainClass");
      if( index >= 0 ) {
        String pack = args[index + 1].substring(0, args[index + 1].lastIndexOf('.'));
        LaunchJar.weavePackages(pack);
        Boot.run(args);
      }
    }

    public static void stall_till_cloudsize(int x, long ms) {
      H2O.waitForCloudSize(x, ms);
      UKV.put(Job.LIST, new Job.List()); // Jobs.LIST must be part of initial keys
    }
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/deploy/EC2.java
package water.deploy;

import java.io.*;
import java.util.*;

import org.apache.commons.codec.binary.Base64;

import water.H2O;
import water.persist.PersistS3;
import water.util.Log;
import water.util.Utils;

import com.amazonaws.AmazonServiceException;
import com.amazonaws.auth.PropertiesCredentials;
import com.amazonaws.services.ec2.AmazonEC2Client;
import com.amazonaws.services.ec2.model.*;

/**
 * Manages EC2 instances.
 * <br>
 * Note: This class is intended for debug and experimentation purposes only, please refer to the
 * documentation to run H2O on AWS.
 */
public class EC2 {
  private static final String USER = System.getProperty("user.name");
  private static final String NAME = USER + "-H2O-Cloud"; // Name tag used to find "our" instances
  public int boxes;                                       // desired cluster size
  public String region = "us-east-1";
  //public String image = "ami-3565305c"; // Amazon Linux, x64, Instance-Store, US East N. Virginia
  //public String image = "ami-dfbfe4b6"; // Amazon Linux, x64, HVM, Instance-Store, US East N. Virginia
  //public String image = "ami-e1357b88"; // Ubuntu Raring 13.04 amd64
  public String image = "ami-09614460"; // Ubuntu Raring 13.04 amd64 HVM
  //public String type = "m1.xlarge";
  public String type = "cc2.8xlarge"; // HPC
  public String securityGroup = "ssh"; // "default";
  public boolean confirm = true;       // ask on the console before creating instances

  // cloud-init user-data: creates a login user with the local user's public key,
  // raises fd limits and installs a JDK.  SSH is firewalled off until runcmd completes.
  //@formatter:off
  static String cloudConfig = "" + // TODO try Amazon AMI "Enhanced Networking"
      l("#cloud-config") +
      l("users:") +
      l(" - name: " + USER) +
      l(" sudo: ALL=(ALL) NOPASSWD:ALL") +
      l(" ssh-authorized-keys:") +
      l(" - " + pubKey()) +
      l(" shell: /bin/bash") +
      l("") +
      l("runcmd:") +
      l(" - iptables -I INPUT -p tcp --dport 22 -j DROP") + //
      l(" - echo 'fs.file-max = 524288' > /etc/sysctl.d/increase-max-fd.conf") + //
      l(" - sysctl -w fs.file-max=524288") + //
      l(" - echo '* soft nofile 524288' > /etc/security/limits.d/increase-max-fd-soft.conf") + //
      l(" - echo '* hard nofile 524288' > /etc/security/limits.d/increase-max-fd-hard.conf") + //
      l(" - apt-get update") +
      l(" - apt-get -y install openjdk-7-jdk") + //
      l(" - apt-get -y install openvpn") +
      l(" - iptables -D INPUT 1") +
      l("");
  static String l(String line) { return line + "\n"; }
  //@formatter:on

  /**
   * Create or terminate EC2 instances. Uses their Name tag to find existing ones.
   */
  public Cloud resize() throws Exception {
    AmazonEC2Client ec2 = new AmazonEC2Client(new PersistS3.H2OAWSCredentialsProviderChain());
    ec2.setEndpoint("ec2." + region + ".amazonaws.com");
    DescribeInstancesResult describeInstancesResult = ec2.describeInstances();
    List<Reservation> reservations = describeInstancesResult.getReservations();
    List<Instance> instances = new ArrayList<Instance>();
    // Collect running instances tagged with our NAME
    for( Reservation reservation : reservations ) {
      for( Instance instance : reservation.getInstances() ) {
        String ip = ip(instance);
        if( ip != null ) {
          String name = null;
          if( instance.getTags().size() > 0 )
            name = instance.getTags().get(0).getValue();
          if( NAME.equals(name) )
            instances.add(instance);
        }
      }
    }
    System.out.println("Found " + instances.size() + " EC2 instances for user " + USER);
    if( instances.size() > boxes ) {
      // NOTE(review): shrink is not implemented — surplus instances are left running
      for( int i = 0; i < instances.size() - boxes; i++ ) {
        // TODO terminate?
      }
    } else if( instances.size() < boxes ) {
      int launchCount = boxes - instances.size();
      System.out.println("Creating " + launchCount + " EC2 instances.");
      if( confirm ) {
        System.out.println("Please confirm [y/n]");
        String s = Utils.readConsole();
        if( s == null || !s.equalsIgnoreCase("y") )
          throw new Exception("Aborted");
      }
      // Cluster placement group for low-latency networking; tolerate "already exists"
      CreatePlacementGroupRequest group = new CreatePlacementGroupRequest();
      group.withGroupName(USER);
      group.withStrategy(PlacementStrategy.Cluster);
      try {
        ec2.createPlacementGroup(group);
      } catch( AmazonServiceException ex ) {
        if( !"InvalidPlacementGroup.Duplicate".equals(ex.getErrorCode()) )
          throw ex;
      }
      RunInstancesRequest run = new RunInstancesRequest();
      run.withInstanceType(type);
      run.withImageId(image);
      run.withMinCount(launchCount).withMaxCount(launchCount);
      run.withSecurityGroupIds(securityGroup);
      Placement placement = new Placement();
      placement.setGroupName(USER);
      run.withPlacement(placement);
      BlockDeviceMapping map = new BlockDeviceMapping();
      map.setDeviceName("/dev/sdb");
      map.setVirtualName("ephemeral0");
      run.withBlockDeviceMappings(map);
      // user-data must be base64-encoded per the EC2 API
      run.withUserData(new String(Base64.encodeBase64(cloudConfig.getBytes())));
      RunInstancesResult runRes = ec2.runInstances(run);
      ArrayList<String> ids = new ArrayList<String>();
      for( Instance instance : runRes.getReservation().getInstances() )
        ids.add(instance.getInstanceId());
      List<Instance> created = wait(ec2, ids);
      System.out.println("Created " + created.size() + " EC2 instances.");
      instances.addAll(created);
    }
    String[] pub = new String[boxes];
    String[] prv = new String[boxes];
    for( int i = 0; i < boxes; i++ ) {
      pub[i] = instances.get(i).getPublicIpAddress();
      prv[i] = instances.get(i).getPrivateIpAddress();
    }
    System.out.println("EC2 public IPs: " + Utils.join(' ', pub));
    System.out.println("EC2 private IPs: " + Utils.join(' ', prv));
    Cloud cloud = new Cloud();
    cloud.publicIPs.addAll(Arrays.asList(pub));
    cloud.privateIPs.addAll(Arrays.asList(prv));
    return cloud;
  }

  /** First line of ~/.ssh/id_rsa.pub, baked into the cloud-config above. */
  private static String pubKey() {
    BufferedReader r = null;
    try {
      String pub = System.getProperty("user.home") + "/.ssh/id_rsa.pub";
      r = new BufferedReader(new FileReader(new File(pub)));
      return r.readLine();
    } catch( IOException e ) {
      throw Log.errRTExcept(e);
    } finally {
      if( r != null )
        try {
          r.close();
        } catch( IOException e ) {
          throw Log.errRTExcept(e);
        }
    }
  }

  /**
   * Tag the new instances and poll every 500ms until they are all running with a
   * public IP and reachable over ssh.  NOTE(review): no timeout — loops forever
   * if an instance never comes up.
   */
  private List<Instance> wait(AmazonEC2Client ec2, List<String> ids) {
    System.out.println("Establishing ssh connections, make sure security group '" //
        + securityGroup + "' allows incoming TCP 22.");
    boolean tagsDone = false;
    for( ;; ) {
      try {
        if( !tagsDone ) {
          CreateTagsRequest createTagsRequest = new CreateTagsRequest();
          createTagsRequest.withResources(ids).withTags(new Tag("Name", NAME));
          ec2.createTags(createTagsRequest);
          tagsDone = true;
        }
        DescribeInstancesRequest request = new DescribeInstancesRequest();
        request.withInstanceIds(ids);
        DescribeInstancesResult result = ec2.describeInstances(request);
        List<Reservation> reservations = result.getReservations();
        List<Instance> instances = new ArrayList<Instance>();
        for( Reservation reservation : reservations )
          for( Instance instance : reservation.getInstances() )
            if( ip(instance) != null )
              instances.add(instance);
        if( instances.size() == ids.size() ) {
          // Try to connect to SSH port on each box
          if( canConnect(instances) )
            return instances;
        }
      } catch( AmazonServiceException xe ) {
        // Ignore and retry
      }
      try {
        Thread.sleep(500);
      } catch( InterruptedException e ) {
        throw Log.errRTExcept(e);
      }
    }
  }

  /** Public IP of a running instance, or null if it has no IP or is not running yet. */
  private static String ip(Instance instance) {
    String ip = instance.getPublicIpAddress();
    if( ip != null && ip.length() != 0 )
      if( instance.getState().getName().equals("running") )
        return ip;
    return null;
  }

  /** True when an `ssh ... exit` probe succeeds against every instance. */
  private static boolean canConnect(List<Instance> instances) {
    for( Instance instance : instances ) {
      try {
        String ssh = "ssh -q" + Host.SSH_OPTS + " " + instance.getPublicIpAddress();
        Process p = Runtime.getRuntime().exec(ssh + " exit");
        if( p.waitFor() != 0 )
          return false;
      } catch( Exception e ) {
        return false;
      } finally { } // NOTE(review): empty finally — dead code, safe to remove
    }
    return true;
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/deploy/Host.java
package water.deploy;

import java.io.File;
import java.util.ArrayList;
import java.util.Set;

import water.H2O;
import water.util.Log;
import water.util.Utils;

/**
 * A remote machine reachable over ssh, with helpers to rsync files into its
 * deployment folder.  Immutable address/user/key triple.
 */
public class Host {
  // Non-interactive ssh: skip host-key checks and keep idle sessions alive
  public static final String SSH_OPTS;
  static {
    SSH_OPTS = "" //
        + " -o UserKnownHostsFile=/dev/null" //
        + " -o StrictHostKeyChecking=no" //
        + " -o LogLevel=quiet" //
        + " -o ServerAliveInterval=15" //
        + " -o ServerAliveCountMax=3";
  }
  public static final String FOLDER = "h2o_rsync"; // remote deployment folder under the user's home
  private final String _address, _user, _key;

  public Host(String addr) {
    this(addr, null);
  }

  public Host(String addr, String user) {
    this(addr, user, null);
  }

  public Host(String addr, String user, String key) {
    _address = addr;
    _user = user != null ? user : System.getProperty("user.name"); // default to the local user
    _key = key;
  }

  public String address() { return _address; }
  public String user() { return _user; }
  public String key() { return _key; }

  public void rsync(Set<String> includes, Set<String> excludes, boolean delete) {
    rsync(includes, excludes, delete, FOLDER);
  }

  /**
   * Rsync the include paths to {@code ~user/folder} on this host over ssh.
   * Blocks until rsync exits; any failure surfaces as a RuntimeException.
   */
  public void rsync(Set<String> includes, Set<String> excludes, boolean delete, String folder) {
    Process process = null;
    try {
      ArrayList<String> args = new ArrayList<String>();
      args.add("rsync");
      args.add("-vrzute");
      args.add(sshWithArgs());
      args.add("--chmod=u=rwx");
      for( String s : includes )
        args.add(new File(s).getCanonicalPath());
      // --exclude seems ignored on Linux (?) so use --exclude-from
      File file = Utils.writeFile(Utils.join('\n', excludes));
      args.add("--exclude-from");
      args.add(file.getCanonicalPath());
      if( delete ) args.add("--delete");
      args.add(_address + ":" + "~" + _user + "/" + folder);
      ProcessBuilder builder = new ProcessBuilder(args);
      process = builder.start();
      String log = "rsync " + H2O.findInetAddressForSelf() + " -> " + _address;
      NodeVM.inheritIO(process, Log.padRight(log + ": ", 24));
      process.waitFor();
    } catch( Exception ex ) {
      throw new RuntimeException(ex);
    } finally {
      if( process != null ) {
        try {
          process.destroy();
        } catch( Exception xe ) {
          // Ignore
        }
      }
    }
  }

  /** Rsync to all hosts in parallel (one daemon thread each); waits for every transfer. */
  public static void rsync(final Host[] hosts, final Set<String> includes, final Set<String> excludes,
      final boolean delete) {
    ArrayList<Thread> threads = new ArrayList<Thread>();
    for( int i = 0; i < hosts.length; i++ ) {
      final int i_ = i;
      Thread t = new Thread() {
        @Override public void run() {
          hosts[i_].rsync(includes, excludes, delete);
        }
      };
      t.setDaemon(true);
      t.start();
      threads.add(t);
    }
    for( Thread t : threads ) {
      try {
        t.join();
      } catch( InterruptedException e ) {
        throw Log.errRTExcept(e);
      }
    }
  }

  /** ssh invocation string (for rsync -e), adding the identity file if one was given. */
  String sshWithArgs() {
    String k = "";
    if( _key != null ) {
      assert new File(_key).exists();
      // Git doesn't set permissions, so force them each time
      // NOTE(review): key path is concatenated into an exec string — avoid key
      // paths containing spaces/shell metacharacters, or switch to ProcessBuilder.
      try {
        Process p = Runtime.getRuntime().exec("chmod 600 " + _key);
        p.waitFor();
      } catch( Exception e ) {
        throw Log.errRTExcept(e);
      }
      k = " -i " + _key;
    }
    return "ssh -l " + _user + " -A" + k + SSH_OPTS;
  }

  @Override public String toString() {
    return "Host " + _address;
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/deploy/LaunchJar.java
package water.deploy;

import java.io.*;
import java.util.*;
import java.util.Map.Entry;
import java.util.jar.*;

import javassist.*;
import water.*;
import water.api.DocGen;
import water.util.Utils;

/**
 * Takes user-supplied jars (stored as keys), rewrites their class names with a
 * unique suffix via javassist so the same job class can be launched repeatedly,
 * registers the rewritten jar on every node, and forks the requested Job.
 */
public class LaunchJar extends Request2 {
  static final int API_WEAVER = 1;
  static public DocGen.FieldDoc[] DOC_FIELDS;

  @API(help = "Jars keys", required = true, filter = Default.class)
  public String jars;

  @API(help = "Class to instantiate and launch", required = true, filter = Default.class)
  public String job_class;

  @Override protected Response serve() {
    final Job job;
    try {
      // Move jars from KV store to tmp files
      ClassPool pool = new ClassPool(true);
      ArrayList<JarEntry> entries = new ArrayList<JarEntry>();
      String[] splits = jars.split(",");
      for( int i = 0; i < splits.length; i++ ) {
        Key key = Key.make(splits[i]);
        // NOTE(review): dead path — the first loop iteration always throws here,
        // so everything below only runs when 'jars' is empty. The jar-extraction
        // code is kept commented-out pending a ValueArray replacement.
        throw H2O.unimpl();
        //ValueArray va = UKV.get(key);
        //File file = File.createTempFile("h2o", ".jar");
        //Utils.writeFileAndClose(file, va.openStream());
        //DKV.remove(key);
        //pool.appendClassPath(file.getPath());
        //
        //JarFile jar = new JarFile(file);
        //Enumeration e = jar.entries();
        //while( e.hasMoreElements() ) {
        //  JarEntry entry = (JarEntry) e.nextElement();
        //  entries.add(entry);
        //}
        //jar.close();
      }
      // Append UID to class names so allow multiple invocations
      String uid = Key.rand();
      ClassMap renames = new ClassMap();
      for( JarEntry entry : entries ) {
        if( entry.getName().endsWith(".class") ) {
          String n = Utils.className(entry.getName());
          String u;
          // Insert the uid before any inner-class suffix ("$...")
          int index = n.indexOf("$");
          if( index < 0 ) index = n.length();
          u = n.substring(0, index) + uid + n.substring(index);
          renames.put(n, u);
        }
      }
      ArrayList<CtClass> updated = new ArrayList();
      for( Entry<String, String> entry : ((Map<String, String>) renames).entrySet() ) {
        CtClass c = pool.get(entry.getKey().replace('/', '.'));
        c.replaceClassName(renames);
        updated.add(c);
      }
      // Create jar file and register it on each node
      HashSet<String> packages = new HashSet();
      ByteArrayOutputStream mem = new ByteArrayOutputStream();
      JarOutputStream jar = new JarOutputStream(mem);
      DataOutputStream bc = new DataOutputStream(jar);
      for( CtClass c : updated ) {
        jar.putNextEntry(new JarEntry(c.getName().replace('.', '/') + ".class"));
        c.toBytecode(bc);
        bc.flush();
        String p = c.getPackageName();
        if( p == null ) throw new IllegalArgumentException("Package is null for class " + c);
        packages.add(p);
      }
      jar.close();
      weavePackages(packages.toArray(new String[0]));
      AddJar task = new AddJar();
      task._data = mem.toByteArray();
      task.invokeOnAllNodes();
      // Start job
      Class c = Class.forName(job_class + uid);
      job = (Job) c.newInstance();
      job.fork();
    } catch( Exception ex ) {
      throw new RuntimeException(ex);
    }
    return Response.done(this);
  }

  /** Register the given packages with the bytecode weaver on every node. */
  public static void weavePackages(String... names) {
    WeavePackages task = new WeavePackages();
    task._names = names;
    task.invokeOnAllNodes();
  }

  static class WeavePackages extends DRemoteTask {
    String[] _names;
    @Override public void lcompute() {
      for( String name : _names )
        Boot.weavePackage(name);
      tryComplete();
    }
    @Override public void reduce(DRemoteTask drt) { } // side effects only
  }

  /** Per-node task: write the jar bytes to a temp file and add it to the boot classpath. */
  static class AddJar extends DRemoteTask {
    byte[] _data;
    @Override public void lcompute() {
      try {
        File file = File.createTempFile("h2o", ".jar");
        Utils.writeFileAndClose(file, new ByteArrayInputStream(_data));
        Boot._init.addExternalJars(file);
        tryComplete();
      } catch( Exception ex ) {
        throw new RuntimeException(ex);
      }
    }
    @Override public void reduce(DRemoteTask drt) { } // side effects only
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/deploy/Node.java
package water.deploy;

import java.io.IOException;

/** A single H2O process under deployment control (in-process, local VM, or remote host). */
public interface Node {
  /** Forward the node's stdout/stderr to this process's console. */
  void inheritIO();
  /** Redirect the node's stdout/stderr to the given files. */
  void persistIO(String outFile, String errFile) throws IOException;
  /** Launch the node. */
  void start();
  /** Block until the node exits; returns its exit code. */
  int waitFor();
  /** Forcibly terminate the node. */
  void kill();
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/deploy/NodeCL.java
package water.deploy;

import java.io.File;
import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.net.*;
import java.util.*;

import water.Boot;
import water.util.Log;

/**
 * Creates a node in-process using a separate class loader.
 */
public class NodeCL extends Thread implements Node {
  private final URL[] _classpath;          // URLs of every classpath entry plus jars found in them
  private final String[] _args;            // arguments forwarded to the node's main
  private final Class _main;               // main class to launch inside the isolated loader
  private ClassLoader _initialClassLoader, _classLoader;

  public NodeCL(Class main, String[] args) {
    super("NodeCL");
    _args = args;
    _main = main;
    _classpath = getClassPath();
    _initialClassLoader = Thread.currentThread().getContextClassLoader();
    // Parent is null so the new loader shares nothing with the caller except the bootstrap classes.
    _classLoader = new URLClassLoader(_classpath, null);
  }

  @Override public void inheritIO() {
    // TODO add -id to PID?
    // invoke(className, methodName, args)
  }

  @Override public void persistIO(String outFile, String errFile) throws IOException {
    // TODO
    // invoke(className, methodName, args)
  }

  @Override public void kill() {
    // TODO
    // invoke(className, methodName, args)
  }

  /**
   * Builds the URL classpath for the isolated loader: every entry of java.class.path,
   * plus any .jar files found inside directory entries.
   */
  static URL[] getClassPath() {
    String[] classpath = System.getProperty("java.class.path").split(File.pathSeparator);
    try {
      final List<URL> list = new ArrayList<URL>();
      if( classpath != null ) {
        for( final String element : classpath ) {
          list.addAll(getDirectoryClassPath(element));
          list.add(new File(element).toURI().toURL());
        }
      }
      return list.toArray(new URL[list.size()]);
    } catch( Exception e ) {
      throw Log.errRTExcept(e);
    }
  }

  @Override public int waitFor() {
    try {
      join();                       // wait for this thread (the in-process node) to finish
      return 0;
    } catch( InterruptedException e ) {
      throw Log.errRTExcept(e);
    }
  }

  @Override public void run() {
    assert Thread.currentThread().getContextClassLoader() == _initialClassLoader;
    // Swap in the isolated loader for the duration of the node's lifetime, and always
    // restore the original one afterwards.
    Thread.currentThread().setContextClassLoader(_classLoader);
    try {
      // Load Context through the isolated loader and invoke it reflectively, so its code
      // (and everything it touches) resolves against the isolated classpath.
      Class<?> c = _classLoader.loadClass(Context.class.getName());
      Method method = c.getMethod("run", String.class, String[].class);
      method.setAccessible(true);
      method.invoke(null, _main.getName(), _args);
    } catch( Exception e ) {
      throw Log.errRTExcept(e);
    } finally {
      Thread.currentThread().setContextClassLoader(_initialClassLoader);
    }
  }

  /** Lists the .jar files directly inside {@code aDir} as URLs; empty if not a directory. */
  private static List<URL> getDirectoryClassPath(String aDir) {
    try {
      final List<URL> list = new LinkedList<URL>();
      final File dir = new File(aDir);
      final URL directoryURL = dir.toURI().toURL();
      final String[] children = dir.list();
      if( children != null ) {
        for( final String element : children ) {
          if( element.endsWith(".jar") ) {
            final URL url = new URL(directoryURL, URLEncoder.encode(element, "UTF-8"));
            list.add(url);
          }
        }
      }
      return list;
    } catch( Exception e ) {
      throw Log.errRTExcept(e);
    }
  }

  /** Runs inside the isolated loader; entry point is invoked reflectively from {@link #run()}. */
  static class Context {
    public static void run(String main, String[] args) throws Exception {
      // Boot takes SystemClassLoader as parent, override with ours
      // NOTE(review): rewrites ClassLoader's private 'parent' field via reflection — assumes
      // Boot._init is a ClassLoader and that the runtime permits this access; confirm on
      // newer JDKs where such reflection is restricted.
      Field parent = ClassLoader.class.getDeclaredField("parent");
      parent.setAccessible(true);
      parent.set(Boot._init, Thread.currentThread().getContextClassLoader());
      Boot.main(Class.forName(main), args);
    }
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/deploy/NodeHost.java
package water.deploy;

import java.io.IOException;

import water.deploy.VM.SSH;

/**
 * Creates a node on a host.
 *
 * Thin adapter that delegates every {@link Node} operation to an {@link SSH} session launching
 * {@code NodeVM} on the remote machine.
 */
public class NodeHost implements Node {
  private final SSH _ssh;

  public NodeHost(Host host, String[] args) {
    _ssh = new SSH(host, VM.javaArgs(NodeVM.class.getName()), args);
  }

  /** The remote host this node runs on. */
  public Host host() { return _ssh.host(); }

  @Override public void inheritIO() { _ssh.inheritIO(); }

  @Override public void persistIO(String outFile, String errFile) throws IOException {
    _ssh.persistIO(outFile, errFile);
  }

  @Override public void start() { _ssh.startThread(); }

  @Override public int waitFor() {
    try {
      _ssh._thread.join();
    } catch( InterruptedException e ) {
      // FIX: restore the interrupt flag instead of silently swallowing it, so callers
      // can still observe the interruption. Return value stays 0 as before.
      Thread.currentThread().interrupt();
    }
    return 0;
  }

  @Override public void kill() {
    // Best-effort: remote kill may fail if the host is already gone; deliberately ignored.
    try {
      _ssh.kill();
    } catch( Exception ignored ) { }
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/deploy/NodeVM.java
package water.deploy;

import water.Boot;

/**
 * Creates a node in a VM.
 *
 * The separate process exits automatically once its parent is gone (see
 * {@link VM#exitWithParent()}), then hands control to {@link Boot#main}.
 */
public class NodeVM extends VM implements Node {
  /** Launches a node VM with no extra node arguments. */
  public NodeVM() {
    this((String[]) null);
  }

  /** Launches a node VM forwarding {@code args} to the node's main. */
  public NodeVM(String[] args) {
    super(javaArgs(NodeVM.class.getName()), args);
  }

  /** Entry point executed inside the child VM. */
  public static void main(String[] args) throws Exception {
    exitWithParent();
    Boot.main(args);
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/deploy/VM.java
package water.deploy;

import java.io.*;
import java.lang.management.ManagementFactory;
import java.lang.management.RuntimeMXBean;
import java.net.*;
import java.util.ArrayList;
import java.util.Arrays;

import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.binary.StringUtils;

import water.Boot;
import water.H2O;
import water.util.Log;

/**
 * Executes code in a separate VM.
 *
 * Builds a {@code java} command line reusing the current VM's classpath, manages the child
 * {@link Process}, and offers stdout/stderr forwarding either to the parent's streams or to
 * files. Nested helpers implement a parent-watchdog VM and a remote VM launched over SSH.
 */
public abstract class VM {
  private final ArrayList<String> _args; // full command line: java, -cp, cp, javaArgs, args
  private Process _process;
  private boolean _inherit;              // forward child IO to parent streams?
  private File _out, _err;               // or persist child IO to these files

  public VM(String[] java, String[] args) {
    _args = new ArrayList<String>();
    _args.add(System.getProperty("java.home") + "/bin/java");

    // Iterate on URIs in case jar has been unpacked by Boot
    _args.add("-cp");
    String cp = "";
    for( URL url : ((URLClassLoader) ClassLoader.getSystemClassLoader()).getURLs() ) {
      try {
        cp += new File(new URI(url.toString())) + File.pathSeparator;
      } catch( URISyntaxException e ) {
        throw Log.errRTExcept(e);
      }
    }
    _args.add(cp);

    _args.addAll(Arrays.asList(java));
    if( args != null ) _args.addAll(Arrays.asList(args));
  }

  /**
   * Java arguments for a child VM: the parent's own VM input arguments minus agentlib and
   * bootclasspath entries, plus the debug flag when set, followed by the main class name.
   */
  static String[] javaArgs(String main) {
    RuntimeMXBean r = ManagementFactory.getRuntimeMXBean();
    ArrayList<String> list = new ArrayList<String>();
    for( String s : r.getInputArguments() )
      if( !s.startsWith("-agentlib") )
        if( !s.startsWith("-Xbootclasspath") )
          list.add(s);
    if( System.getProperty(H2O.DEBUG_ARG) != null )
      if( list.indexOf("-D" + H2O.DEBUG_ARG) < 0 )
        list.add("-D" + H2O.DEBUG_ARG);
    list.add(main);
    return list.toArray(new String[list.size()]);
  }

  public Process process() { return _process; }

  public void inheritIO() { _inherit = true; }

  public void persistIO(String out, String err) throws IOException {
    _out = new File(out);
    _err = new File(err);
  }

  /** Spawns the child process and wires up IO forwarding as configured. */
  public void start() {
    ProcessBuilder builder = new ProcessBuilder(_args);
    try {
      assert !_inherit || (_out == null); // the two IO modes are mutually exclusive
      _process = builder.start();
      if( _inherit ) inheritIO(_process, null);
      if( _out != null ) persistIO(_process, _out, _err);
    } catch( IOException e ) {
      throw Log.errRTExcept(e);
    }
  }

  /** True while the child process has not yet produced an exit value. */
  public boolean isAlive() {
    try {
      _process.exitValue();
      return false;
    } catch( IllegalThreadStateException xe ) {
      return true; // exitValue throws this while the process is still running
    } catch( Exception e ) {
      throw Log.errRTExcept(e);
    }
  }

  public int waitFor() {
    try {
      return _process.waitFor();
    } catch( InterruptedException e ) {
      throw Log.errRTExcept(e);
    }
  }

  /** Destroys the child and waits for it to actually terminate. */
  public void kill() {
    _process.destroy();
    try {
      _process.waitFor();
    } catch( InterruptedException xe ) {
      // FIX: restore the interrupt flag instead of losing it entirely.
      Thread.currentThread().interrupt();
    }
  }

  /**
   * Daemon thread that exits this VM once its parent is gone, detected by EOF on stdin
   * (the parent holds the pipe open for its lifetime).
   */
  public static void exitWithParent() {
    Thread thread = new Thread() {
      @Override public void run() {
        // Avoid on Windows as it exits immediately. Seems to work using Java7
        // ProcessBuilder.redirectInput, but we need to run on Java 6 for now
        if( !System.getProperty("os.name").toLowerCase().contains("win") ) {
          for( ;; ) {
            int b;
            try {
              b = System.in.read();
            } catch( Exception e ) {
              b = -1;
            }
            if( b < 0 ) {
              Log.info("Assuming parent done, exit(0)");
              H2O.exit(0);
            }
          }
        }
      }
    };
    thread.setDaemon(true);
    thread.start();
  }

  /** Forwards the process's stdout/stderr to this VM's streams, optionally prefixed. */
  public static void inheritIO(Process process, final String header) {
    forward(process, header, process.getInputStream(), System.out);
    forward(process, header, process.getErrorStream(), System.err);
  }

  /** Forwards the process's stdout/stderr to the given files. */
  public static void persistIO(Process process, File out, File err) throws IOException {
    forward(process, null, process.getInputStream(), new PrintStream(out));
    forward(process, null, process.getErrorStream(), new PrintStream(err));
  }

  /** Pumps {@code source} line-by-line to {@code target} on a dedicated thread. */
  private static void forward(Process process, final String header, InputStream source, final PrintStream target) {
    final BufferedReader source_ = new BufferedReader(new InputStreamReader(source));
    Thread thread = new Thread() {
      @Override public void run() {
        try {
          for( ;; ) {
            String line = source_.readLine();
            if( line == null ) break;
            String s = header == null ? line : header + line;
            Log.unwrap(target, s);
          }
        } catch( IOException e ) {
          // Ignore, process probably done
        }
      }
    };
    thread.start();
  }

  /**
   * A VM whose only job is to wait for its parent to be gone, then kill its child process.
   * Otherwise every killed test leaves a bunch of orphan ssh and java processes.
   */
  public static class Watchdog extends VM {
    public Watchdog(String[] java, String[] node) {
      super(java, node);
    }

    /** Runs the command, killing the child via a shutdown hook if this VM dies first. */
    protected static void exec(ArrayList<String> list) throws Exception {
      ProcessBuilder builder = new ProcessBuilder(list);
      final Process process = builder.start();
      NodeVM.inheritIO(process, null);
      Runtime.getRuntime().addShutdownHook(new Thread() {
        @Override public void run() {
          process.destroy();
        }
      });
      process.waitFor();
    }
  }

  /** Serializable bundle of host credentials plus java/node arguments, shipped to the SSH VM. */
  static class Params implements Serializable {
    String[] _host, _java, _node;

    Params(Host host, String[] java, String[] node) {
      _host = new String[] { host.address(), host.user(), host.key() };
      _java = java;
      _node = node;
    }
  }

  /** Root folder of the H2O distribution, derived from the jar path or the class file location. */
  public static File h2oFolder() {
    File target;
    if( Boot._init.fromJar() ) target = new File(Boot._init.jarPath());
    else {
      try {
        URL url = Boot._init.getResource(H2O.class.getName().replace('.', '/') + ".class");
        target = new File(url.toURI()).getParentFile().getParentFile().getParentFile();
      } catch( URISyntaxException e ) {
        throw new RuntimeException(e);
      }
    }
    return target.getParentFile();
  }

  /**
   * A remote JVM, launched over SSH.
   */
  public static class SSH extends Watchdog {
    Host _host;
    Thread _thread;

    public SSH(Host host, String[] java, String[] node) {
      this(new String[] { SSH.class.getName() }, host, java, node);
    }

    public SSH(String[] localJava, Host host, String[] java, String[] node) {
      // All remote parameters travel as one Base64-serialized argument (see write/read).
      super(localJava, new String[] { write(new Params(host, java, node)) });
      _host = host;
    }

    public Host host() { return _host; }

    /** Starts the SSH session on a daemon thread; errors are logged, not thrown. */
    final void startThread() {
      _thread = new Thread() {
        @Override public void run() {
          try {
            SSH.this.start();
            SSH.this.waitFor();
          } catch( Exception ex ) {
            Log.err(ex);
          }
        }
      };
      _thread.setDaemon(true);
      _thread.start();
    }

    /** Entry point of the local SSH watchdog VM: decodes Params and execs the ssh command. */
    public static void main(String[] args) throws Exception {
      exitWithParent();
      Params p = read(args[0]);
      Host host = new Host(p._host[0], p._host[1], p._host[2]);
      ArrayList<String> list = new ArrayList<String>();
      list.addAll(Arrays.asList(host.sshWithArgs().split(" ")));
      list.add(host.address());
      list.add(command(p._java, p._node));
      exec(list);
    }

    /**
     * Builds the remote shell command: cd into the deploy folder and run java with a classpath
     * rewritten relative to the H2O folder, escaping spaces and '$' for the remote shell.
     */
    static String command(String[] javaArgs, String[] nodeArgs) {
      String cp = "";
      try {
        String h2o = h2oFolder().getCanonicalPath();
        for( String s : System.getProperty("java.class.path").split(File.pathSeparator) ) {
          cp += cp.length() != 0 ? ":" : "";
          String path = new File(s).getCanonicalPath();
          if( path.startsWith(h2o) ) path = path.substring(h2o.length() + 1);
          cp += path.replace('\\', '/').replace(" ", "\\ ");
        }
      } catch( IOException e ) {
        throw Log.errRTExcept(e);
      }
      String java = Cloud.JRE != null ? new File(Cloud.JRE).getName() + "/bin/java" : "java";
      String command = "cd " + Host.FOLDER + ";" + java + " -cp " + cp;
      for( String s : javaArgs ) command += " " + s;
      for( String s : nodeArgs ) command += " " + s;
      return command.replace("$", "\\$");
    }
  }

  /** Serializes {@code s} to a Base64 string (inverse of {@link #read}). */
  static String write(Serializable s) {
    try {
      ByteArrayOutputStream bos = new ByteArrayOutputStream();
      ObjectOutput out = null;
      try {
        out = new ObjectOutputStream(bos);
        out.writeObject(s);
        return StringUtils.newStringUtf8(Base64.encodeBase64(bos.toByteArray(), false));
      } finally {
        // FIX: guard against out == null — if the ObjectOutputStream constructor threw,
        // the unconditional close() raised an NPE that masked the original exception.
        if( out != null ) out.close();
        bos.close();
      }
    } catch( Exception ex ) {
      throw Log.errRTExcept(ex);
    }
  }

  /** Deserializes a Base64 string produced by {@link #write}. */
  static <T> T read(String s) {
    try {
      ObjectInput in = new ObjectInputStream(new ByteArrayInputStream(Base64.decodeBase64(s)));
      try {
        return (T) in.readObject();
      } finally {
        in.close();
      }
    } catch( Exception ex ) {
      throw Log.errRTExcept(ex);
    }
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/exec/AST.java
package water.exec; import water.*; import water.fvec.Chunk; import water.fvec.Frame; import water.fvec.Vec; import java.text.NumberFormat; import java.text.ParsePosition; import java.util.ArrayList; import java.util.Arrays; import java.util.Locale; /** Parse a generic R string and build an AST, in the context of an H2O Cloud * @author cliffc@0xdata.com */ // -------------------------------------------------------------------------- abstract public class AST extends Iced { final Type _t; AST( Type t ) { assert t != null; _t = t; } static AST parseCXExpr(Exec2 E, boolean EOS ) { if( EOS && E.peekEOS() ) { E._x--; return new ASTNop(); } AST ast2, ast = ASTApply.parseInfix(E,0,EOS); if( ast == null ) return ASTAssign.parseNew(E,EOS); // In case of a slice or id, try match an assignment if( ast instanceof ASTSlice || ast instanceof ASTId) if( (ast2 = ASTAssign.parse(E,ast,EOS)) != null ) return ast2; // Next try match an IFELSE statement if( (ast2 = ASTIfElse.parse(E,ast,EOS)) != null ) return ast2; // Return the infix: op1* expr {op2 op1* expr}* return ast; } static AST parseVal(Exec2 E, boolean EOS ) { E.skipWS(EOS); AST ast; // Simple paren expression if( E.peek('(',EOS) ) return E.xpeek(')',E._x,parseCXExpr(E,false)); if( (ast = ASTId .parse(E)) != null ) return ast; if( (ast = ASTNum .parse(E)) != null ) return ast; if( (ast = ASTOp .parse(E)) != null ) return ast; if( (ast = ASTStr .parse(E)) != null ) return ast; return null; } abstract void exec(Env env); boolean isPosConstant() { return false; } // Scrape out a column name, if we can. NULL if we cannot. 
String argName() { return null; } protected StringBuilder indent( StringBuilder sb, int d ) { for( int i=0; i<d; i++ ) sb.append(" "); return sb.append(_t).append(' '); } public StringBuilder toString( StringBuilder sb, int d ) { return indent(sb,d).append(this); } } // -------------------------------------------------------------------------- class ASTNop extends AST { ASTNop() { super(Type.DBL); } @Override void exec(Env env) { env.push(Double.NaN); } } // -------------------------------------------------------------------------- class ASTStatement extends AST { final AST[] _asts; ASTStatement( AST[] asts ) { super(asts[asts.length-1]._t); _asts = asts; } static ASTStatement parse( Exec2 E ) { ArrayList<AST> asts = new ArrayList<AST>(); while( true ) { AST ast = parseCXExpr(E,true); if( ast == null ) break; asts.add(ast); if( !E.peekEOS() ) break; // if not finding statement separator, break } if( asts.size()==0 ) return null; return new ASTStatement(asts.toArray(new AST[asts.size()])); } @Override void exec(Env env) { for( int i=0; i<_asts.length-1; i++ ) { _asts[i].exec(env); // Exec all statements env.pop(); // Pop all intermediate results } _asts[_asts.length-1].exec(env); // Return final statement as result } @Override public String toString() { return ";;;"; } @Override public StringBuilder toString( StringBuilder sb, int d ) { for( int i=0; i<_asts.length-1; i++ ) _asts[i].toString(sb,d+1).append(";\n"); return _asts[_asts.length-1].toString(sb,d+1); } } // -------------------------------------------------------------------------- class ASTApply extends AST { final AST _args[]; private ASTApply( AST args[] ) { super(args[0]._t.ret()); _args = args; } // Wrap compatible but different-sized ops in reduce/bulk ops. 
static ASTApply make(AST args[],Exec2 E, int x) { // Make a type variable for this application Type ts[] = new Type[args.length]; ts[0] = Type.unbound(); for( int i=1; i<ts.length; i++ ) ts[i] = args[i]._t.find(); Type ft1 = Type.fcn(ts); AST fast = args[0]; Type ft2 = fast._t.find(); // Should be a function type if( ft1.union(ft2) ) // Union 'em return new ASTApply(args); // Error handling if( ft2.isNotFun() ) // Oops, failed basic sanity E.throwErr("Function-parens following a "+ft2,x); if( ft2._ts.length != ts.length ) E.throwErr("Passed "+(ts.length-1)+" args but expected "+(ft2._ts.length-1),x); String vars[] = ((ASTOp)fast)._vars; for( int i=1; i<ts.length; i++ ) if( !ft2._ts[i].union(args[i]._t) ) E.throwErr("Arg '"+vars[i]+"'"+" typed as "+ft2._ts[i]+" but passed "+args[i]._t.find(),x); throw H2O.fail(); } // Parse a prefix operator static AST parsePrefix(Exec2 E, boolean EOS) { int x0 = E._x; AST pre = parseVal(E,EOS); if( pre == null ) return null; while( true ) { int x = E._x; if( !E.peek('(', true) ) return pre; // Plain op, no prefix application if (pre._t.isNotFun()) { E._x = x0; if ((pre = ASTOp.parse(E)) == null) E.throwErr("No potential function was found.", x0); if( !E.peek('(') ) return pre; } AST args[] = new AST[] { pre, null }; int i=1; if( !E.peek(')') ) { while( true ) { if( (args[i++] = parseCXExpr(E,false)) == null ) E.throwErr("Missing argument",E._x); if (args[i-1] instanceof ASTAssign) { ASTAssign a = (ASTAssign)args[i-1]; if (a._lhs.argName() != null && a._lhs.argName().equals("na.rm")) { ASTReducerOp op = (ASTReducerOp)args[0]; op._narm = (a._eval.argName().equals("T") || a._eval.argName().equals("TRUE") || a._eval.toString().equals("1.0")); args[0] = op; } } if( E.peek(')') ) break; E.xpeek(',',E._x,null); if( i==args.length ) args = Arrays.copyOf(args,args.length<<1); } } args = Arrays.copyOf(args,i); pre = make(args,E,x); } } // Parse an infix boolean operator static AST parseInfix(Exec2 E, int curr_prec, boolean EOS) { int x = 
E._x; AST ast; E.skipWS(EOS); ASTOp op1 = ASTOp.parseUniInfixOp(E); if (op1 != null) { // CASE 1 ~ INFIX1 := [] OP INFIX if ((ast = parseInfix(E,op1._precedence,EOS)) != null) ast = make(new AST[]{op1,ast},E,x); else { // CASE 2 ~ INFIX1 := [] OP E._x = x; ast = ASTSlice.parse(E, EOS); } } else { // CASE 3 ~ INFIX1 := [] SLICE ast = ASTSlice.parse(E, EOS); } // CASE 0 ~ [] if (ast == null) return null; // INFIX := INFIX1 OP INFIX while( true ) { int op_x = E._x; E.skipWS(EOS); ASTOp op = ASTOp.parseBinInfixOp(E); if( op == null || op._precedence < curr_prec || (op.leftAssociate() && op._precedence == curr_prec) ) { E._x = op_x; return ast; } op_x = E._x; AST rite = parseInfix(E,op._precedence, false); if (rite == null) E.throwErr("Missing expr or unknown ID", op_x); ast = make(new AST[]{op,ast,rite},E,x); } } @Override public String toString() { return _args[0].toString()+"()"; } @Override public StringBuilder toString( StringBuilder sb, int d ) { _args[0].toString(sb,d).append("\n"); for( int i=1; i<_args.length-1; i++ ) _args[i].toString(sb,d+1).append('\n'); return _args[_args.length-1].toString(sb,d+1); } // Apply: execute all arguments (including the function argument) yielding // the function itself, plus all normal arguments on the stack. Then execute // the function, which is responsible for popping all arguments and pushing // the result. @Override void exec(Env env) { int sp = env._sp; for( AST arg : _args ) arg.exec(env); assert sp+_args.length==env._sp; env.fcn(-_args.length).apply(env,_args.length,this); } } // -------------------------------------------------------------------------- class ASTSlice extends AST { final AST _ast, _cols, _rows; // 2-D slice of an expression ASTSlice( Type t, AST ast, AST cols, AST rows ) { super(t); _ast = ast; _cols = cols; _rows = rows; } static AST parse(Exec2 E, boolean EOS ) { int x = E._x; AST ast = ASTApply.parsePrefix(E, EOS); if( ast == null ) return null; if( !E.peek('[',EOS) ) // Not start of slice? 
return ASTNamedCol.parse(E,ast,EOS); // Also try named col slice if( !Type.ARY.union(ast._t) ) E.throwErr("Not an ary",x); if( E.peek(']',false) ) return ast; // [] ===> same as no slice AST rows=E.xpeek(',',(x=E._x),parseCXExpr(E, false)); if( rows != null && !rows._t.union(Type.dblary()) ) E.throwErr("Must be scalar or array",x); AST cols=E.xpeek(']',(x=E._x),parseCXExpr(E, false)); if( cols != null && !cols._t.union(Type.dblary()) ) if (cols._t.isStr()) E.throwErr("The current Exec does not handle strings",x); else E.throwErr("Must be scalar or array",x); Type t = // Provable scalars will type as a scalar rows != null && rows.isPosConstant() && cols != null && cols.isPosConstant() ? Type.DBL : Type.ARY; return new ASTSlice(t,ast,cols,rows); } @Override void exec(Env env) { int sp = env._sp; _ast.exec(env); assert sp+1==env._sp; // Scalar load? Throws AIIOOB if out-of-bounds if( _t.isDbl() ) { // Known that rows & cols are simple positive constants. // Use them directly, throwing a runtime error if OOB. long row = (long)((ASTNum)_rows)._d; int col = (int )((ASTNum)_cols)._d; Frame ary=env.popAry(); String skey = env.key(); double d = ary.vecs()[col-1].at(row-1); env.subRef(ary,skey); // Toss away after loading from it env.push(d); } else { // Else It's A Big Copy. Some Day look at proper memory sharing, // disallowing unless an active-temp is available, etc. // Eval cols before rows (R's eval order). Frame ary=env._ary[env._sp-1]; // Get without popping Object cols = select(ary.numCols(),_cols,env); Object rows = select(ary.numRows(),_rows,env); Frame fr2 = ary.deepSlice(rows,cols); // After slicing, pop all expressions (cannot lower refcnt till after all uses) if( rows!= null ) env.pop(); if( cols!= null ) env.pop(); if( fr2 == null ) fr2 = new Frame(); // Replace the null frame with the zero-column frame env.pop(); // Pop sliced frame, lower ref env.push(fr2); } } // Execute a col/row selection & return the selection. NULL means "all". 
// Error to mix negatives & positive. Negative list is sorted, with dups // removed. Positive list can have dups (which replicates cols) and is // ordered. numbers. 1-based numbering; 0 is ignored & removed. static Object select( long len, AST ast, Env env ) { if( ast == null ) return null; // Trivial "all" ast.exec(env); long cols[]; if( !env.isAry() ) { int col = (int)env._d[env._sp-1]; // Peek double; Silent truncation (R semantics) if( col < 0 && col < -len ) col=0; // Ignore a non-existent column if( col == 0 ) return new long[0]; return new long[]{col}; } // Got a frame/list of results. // Decide if we're a toss-out or toss-in list Frame ary = env._ary[env._sp-1]; // Peek-frame if( ary.numCols() != 1 ) throw new IllegalArgumentException("Selector must be a single column: "+ary.toStringNames()); Vec vec = ary.anyVec(); // Check for a matching column of bools. if( ary.numRows() == len && vec.min()>=0 && vec.max()<=1 && vec.isInt() ) return ary; // Boolean vector selection. // Convert single vector to a list of longs selecting rows if(ary.numRows() > 10000000) throw H2O.fail("Unimplemented: Cannot explicitly select > 10000000 rows in slice."); cols = MemoryManager.malloc8((int)ary.numRows()); for(int i = 0; i < cols.length; ++i){ if(vec.isNA(i))throw new IllegalArgumentException("Can not use NA as index!"); cols[i] = vec.at8(i); } return cols; } @Override public String toString() { return "[,]"; } @Override public StringBuilder toString( StringBuilder sb, int d ) { indent(sb,d).append(this).append('\n'); _ast.toString(sb,d+1).append("\n"); if( _cols==null ) indent(sb,d+1).append("all\n"); else _cols.toString(sb,d+1).append("\n"); if( _rows==null ) indent(sb,d+1).append("all"); else _rows.toString(sb,d+1); return sb; } } // -------------------------------------------------------------------------- class ASTNamedCol extends AST { final AST _ast; // named slice of an expression final String _colname; // ASTNamedCol( Type t, AST ast, String colname ) { super(t); 
_ast = ast; _colname=colname; } static AST parse(Exec2 E, AST ast, boolean EOS ) { if( !E.peek('$',true) ) return ast; int x = E._x; E.skipWS(EOS); String colname = E.isID(); if( colname == null ) E.throwErr("Missing column name after $",x); return new ASTNamedCol(Type.ARY,ast,colname); } @Override void exec(Env env) { int sp = env._sp; _ast.exec(env); assert sp+1==env._sp; Frame ary=env.peekAry(); int cidx = ary.find(_colname); if( cidx== -1 ) throw new IllegalArgumentException("Missing column "+_colname+" in frame "+ary.toStringNames()); Frame fr2 = new Frame(new String[]{ary._names[cidx]}, new Vec[]{ary.vecs()[cidx]}); env.poppush(1,fr2,null); } @Override public String toString() { return "$"+_colname; } } // -------------------------------------------------------------------------- class ASTId extends AST { final String _id; final int _depth; // *Relative* lexical depth of definition final int _num; // Number/slot in the lexical scope ASTId( Type t, String id, int d, int n ) { super(t); _id=id; _depth=d; _num=n; } // Parse a valid ID, or return null; static ASTId parse(Exec2 E) { int x = E._x; String var = E.isID(); if( var == null ) return null; // Built-in ops parse as ops, not vars if( ASTOp.isInfixOp(var) ) { E._x=x; return null; } // See if pre-existing for( int d=E.lexical_depth(); d >=0; d-- ) { ArrayList<ASTId> asts = E._env.get(d); for( int i=asts.size()-1; i >=0; i-- ) { ASTId id = asts.get(i); if( var.equals(id._id) ) // Return an ID with a relative lexical depth and same slot# return new ASTId(id._t,id._id,E.lexical_depth()-d,id._num); } } // Never see-before ID? Treat as a bad parse E._x=x; return null; } // Parse a NEW valid ID, or return null; static String parseNew(Exec2 E) { int x = E._x; String id = E.isID(); if( id == null ) return null; // Built-in ops parse as ops, not vars if( ASTOp.isInfixOp(id) ) { E._x=x; return null; } return id; } @Override void exec(Env env) { // Local scope? Grab from the stack. 
// (tail of ASTId.exec, opened before this chunk) -- push the id's value:
// local scope reads straight from this frame's slot, otherwise walk the
// captured closure chain.
      if( _depth ==0 ) { env.push_slot(_depth,_num); return; }
    // Nested scope? need to grab from the nested-scope closure
    ASTFunc fcn = env.fcnScope(_depth);
    fcn._env.push_slot(_depth-1,_num,env);
  }
  @Override String argName() { return _id; }
  @Override public String toString() { return _id; }
}

// A String constant in the AST; exec() just pushes the literal.
class ASTStr extends AST {
  final String _str;
  ASTStr(String str) { super(Type.STR); _str=str; }
  // Parse a string, or throw a parse error
  static ASTStr parse(Exec2 E) {
    String str = E.isString();
    if (str != null) {
      E._x += str.length()+2; //str + quotes
      return new ASTStr(str);
    }
    return null;
  }
  @Override void exec(Env env) { env.push(_str); }
  @Override public String toString() { return _str; }
}

// --------------------------------------------------------------------------
// Assignment node: "lhs = rhs" / "lhs <- rhs".  The LHS is either a plain ID
// or a slice of an ID (partial update of a Frame).
class ASTAssign extends AST {
  final AST _lhs; final AST _eval;
  ASTAssign( AST lhs, AST eval ) { super(eval._t); _lhs=lhs; _eval=eval; }
  // Parse a valid LHS= or return null
  static ASTAssign parse(Exec2 E, AST ast, boolean EOS) {
    int x = E._x;
    // Allow '=' and '<-' assignment
    if( !E.isAssign(EOS) ) return null;
    AST ast2=ast;
    ASTSlice slice= null;
    if( (ast instanceof ASTSlice) ) // Peek thru slice op
      ast2 = (slice=(ASTSlice)ast)._ast;
    // Must be a simple in-scope ID
    if( !(ast2 instanceof ASTId) ) E.throwErr("Can only assign to ID (or slice)",x);
    ASTId id = (ASTId)ast2;
    final AST eval = parseCXExpr(E, false);
    if( eval == null ) E.throwErr("Missing RHS",x);
    boolean partial = slice != null && (slice._cols != null || slice._rows != null);
    if( partial ) { // Partial slice assignment?
      if( eval._t.isFcn() ) E.throwErr("Assigning a "+eval._t+" into '"+id._id+"' which is a "+id._t,x);
      if( E.lexical_depth()> 0 )
        throw H2O.unimpl();     // Must copy whole array locally, before updating the local copy
    }
    if( id._depth > 0 ) {       // Shadowing an outer scope?
      // Inner-scope assignment to a new local
      ast = id = extend_local(E,eval._t,id._id);
    } else {                    // Overwriting same scope
      if( E.lexical_depth()>0 ) { // Inner scope?
        if( !ast._t.union(eval._t) ) // Disallow type changes in local scope in functions.
          E.throwErr("Assigning a "+eval._t+" into '"+id._id+"' which is a "+id._t,x);
      } else {                  // Outer scope; can change type willy-nilly
        if( !partial && !ast._t.union(eval._t) ) {
          // Type changed: replace the id in the global scope with a fresh one.
          ArrayList<ASTId> vars = E._env.get(0);
          ASTId id2 = new ASTId(eval._t,id._id,0,id._num);
          vars.set(id2._num,id2);
          ast = id2;
        }
      }
    }
    return new ASTAssign(ast,eval);
  }
  // Parse a valid LHS= or return null - for a new variable
  static ASTAssign parseNew(Exec2 E, boolean EOS) {
    int x = E._x;
    String var = ASTId.parseNew(E);
    if( var == null ) return null;
    if( !E.isAssign(EOS) ) {
      if( Exec2.isLetter(var.charAt(0) ) ) E.throwErr("Unknown var "+var,x);
      E._x = x; return null;    // Let higher parse levels sort it out
    }
    x = E._x;
    AST eval = parseCXExpr(E, EOS);
    if( eval == null ) E.throwErr("Missing RHS",x);
    // Extend the local environment by the new name
    return new ASTAssign(extend_local(E,eval._t,var),eval);
  }
  // Append a fresh id of type t to the innermost lexical scope.
  static ASTId extend_local( Exec2 E, Type t, String var ) {
    ArrayList<ASTId> vars = E._env.get(E.lexical_depth());
    ASTId id = new ASTId(t,var,0,vars.size());
    vars.add(id);
    return id;
  }
  @Override void exec(Env env) {
    _eval.exec(env);            // RHS before LHS (R eval order)
    if( _lhs instanceof ASTId ) {
      // Whole-variable assignment: store top-of-stack into the id's slot.
      ASTId id = (ASTId)_lhs;
      env.tos_into_slot(id._depth, id._num, id._id);
      return;
    }
    // Peel apart a slice assignment
    ASTSlice slice = (ASTSlice)_lhs;
    ASTId id = (ASTId)slice._ast;
    assert id._depth==0;        // Can only modify in the local scope.
    // Simple assignment using the slice syntax
    if( slice._rows==null & slice._cols==null ) {
      env.tos_into_slot(id._depth,id._num,id._id);
      return;
    }
    // Pull the LHS off the stack; do not lower the refcnt
    Frame ary = env.frId(id._depth,id._num);
    // Pull the RHS off the stack; do not lower the refcnt
    Frame ary_rhs=null;
    double d=Double.NaN;
    if( env.isDbl() ) d = env._d[env._sp-1];
    else ary_rhs = env.peekAry(); // Pop without deleting
    // Typed as a double ==> the row & col selectors are simple constants
    if( slice._t == Type.DBL ) { // Typed as a double?
      assert ary_rhs==null;
      long row = (long)((ASTNum)slice._rows)._d-1;
      int  col = (int )((ASTNum)slice._cols)._d-1;
      Chunk c = ary.vecs()[col].chunkForRow(row);
      c.set(row,d);
      Futures fs = new Futures();
      c.close(c.cidx(),fs);
      fs.blockForPending();
      env.push(d);
      return;
    }
    // Execute the slice LHS selection operators
    Object cols = ASTSlice.select(ary.numCols(),slice._cols,env);
    Object rows = ASTSlice.select(ary.numRows(),slice._rows,env);
    long[] cs1; long[] rs1;
    // Single-cell update: both selectors resolved to 1-element index arrays.
    if(cols != null && rows != null && (cs1 = (long[])cols).length == 1 && (rs1 = (long[])rows).length == 1) {
      assert ary_rhs == null;
      long row = rs1[0]-1;
      int  col = (int)cs1[0]-1;
      if(col >= ary.numCols() || row >= ary.numRows()) throw H2O.unimpl();
      if(ary.vecs()[col].isEnum()) throw new IllegalArgumentException("Currently can only set numeric columns");
      ary.vecs()[col].set(row,d);
      env.push(d);
      return;
    }
    // Partial row assignment?
    if( rows != null ) {
      // Only have partial row assignment
      if (cols == null) {
        // For every col at the range of indexes, set the value to be the rhs.
        // If the rhs is a double, then fill with doubles, NA where type is Enum.
        if (ary_rhs == null) {
          // Make a new Vec where each row to be written over has the value d
          final long[] rows0 = (long[]) rows;
          final double d0 = d;
          Vec v = new MRTask2() {
            @Override public void map(Chunk cs) {
              for (long er : rows0) {
                er = Math.abs(er) - 1; // 1-based -> 0-based
                if (er < cs._start || er > (cs._len + cs._start - 1)) continue;
                cs.set0((int) (er - cs._start), d0);
              }
            }
          }.doAll(ary.anyVec().makeZero()).getResult()._fr.anyVec();
          // MRTask over the lhs array
          new MRTask2() {
            @Override public void map(Chunk[] chks) {
              // Replace anything that is non-zero in the rep_vec.
              Chunk rep_vec = chks[chks.length-1];
              for (int row = 0; row < chks[0]._len; ++row) {
                if (rep_vec.at0(row) == 0) continue;
                for (Chunk chk : chks) {
                  if (chk._vec.isEnum()) { chk.setNA0(row); }
                  else { chk.set0(row, d0); }
                }
              }
            }
          }.doAll(ary.add("rep_vec",v));
          // Drop the temporary replacement-marker vec again.
          UKV.remove(v._key);
          UKV.remove(ary.remove(ary.numCols()-1)._key);
        // If the rhs is an array, then fail if `height` of the rhs != rows.length. Otherwise, fetch-n-fill! (expensive)
        } else {
          throw H2O.unimpl();
        }
      // Have partial row and col assignment
      } else {
        throw H2O.unimpl();
      }
//      throw H2O.unimpl();
    } else {
      assert cols != null; // all/all assignment uses simple-assignment
      // Convert constant into a whole vec
      if (ary_rhs == null) ary_rhs = new Frame(ary.anyVec().makeCon(d));
      // Make sure we either have 1 col (repeated) or exactly a matching count
      long[] cs = (long[]) cols; // Columns to act on
      if (ary_rhs.numCols() != 1 && ary_rhs.numCols() != cs.length)
        throw new IllegalArgumentException("Can only assign to a matching set of columns; trying to assign " + ary_rhs.numCols() + " cols over " + cs.length + " cols");
      // Replace the LHS cols with the RHS cols
      Vec rvecs[] = ary_rhs.vecs();
      Futures fs = new Futures();
      for (int i = 0; i < cs.length; i++) {
        int cidx = (int) cs[i] - 1; // Convert 1-based to 0-based
        Vec rv = env.addRef(rvecs[rvecs.length == 1 ?
0 : i]); // (continuation: repeat the single RHS vec, else pick the i-th)
        if (cidx == ary.numCols()) {
          // Appending past the last column: add a brand-new column.
          if (!rv.group().equals(ary.anyVec().group())) {
            env.subRef(rv);
            rv = ary.anyVec().align(rv);
            env.addRef(rv);
          }
          ary.add("C" + String.valueOf(cidx + 1), rv); // New column name created with 1-based index
        } else {
          if (!(rv.group().equals(ary.anyVec().group())) && rv.length() == ary.anyVec().length()) {
            env.subRef(rv);
            rv = ary.anyVec().align(rv);
            env.addRef(rv);
          }
          fs = env.subRef(ary.replace(cidx, rv), fs);
        }
      }
      fs.blockForPending();
    }
    // After slicing, pop all expressions (cannot lower refcnt till after all uses)
    int narg = 0;
    if( rows!= null ) narg++;
    if( cols!= null ) narg++;
    env.pop(narg);
  }
  @Override String argName() { return _lhs instanceof ASTId ? ((ASTId)_lhs)._id : null; }
  @Override public String toString() { return "="; }
  @Override public StringBuilder toString( StringBuilder sb, int d ) {
    indent(sb,d).append(this).append('\n');
    _lhs.toString(sb,d+1).append('\n');
    _eval.toString(sb,d+1);
    return sb;
  }
}

// --------------------------------------------------------------------------
// A double constant in the AST.
class ASTNum extends AST {
  // NOTE(review): NumberFormat is not thread-safe; sharing one static instance
  // assumes Exec2 parsing is single-threaded -- confirm before parallelizing.
  static final NumberFormat NF = NumberFormat.getInstance(Locale.US);
  static { NF.setGroupingUsed(false); }
  final double _d;
  ASTNum(double d) { super(Type.DBL); _d=d; }
  // Parse a number, or throw a parse error
  static ASTNum parse(Exec2 E) {
    int startPosition = E._x;
    MyInteger charactersConsumed = new MyInteger();
    double d = parseNumberWithScientificNotationProperlyHandled(E._str, startPosition, charactersConsumed);
    if( charactersConsumed._val == 0 ) return null;
    E._x = startPosition + charactersConsumed._val;
    return new ASTNum(d);
  }
  boolean isPosConstant() { return _d >= 0; }
  @Override void exec(Env env) { env.push(_d); }
  @Override public String toString() { return Double.toString(_d); }

  /** Wrap an integer so that it can be modified by a called method. i.e. Pass-by-reference. */
  static class MyInteger { public int _val; }

  /**
   * Parse a scientific number more correctly for commands passed in from R.
   * Unfortunately, NumberFormat.parse doesn't get the job done.
   * It expects 'E' and can't handle 'e' or 'E+nnn'.
   *
   * @param s String to parse
   * @param startPosition Starting position in the string to parse from.
   * @param charactersConsumed [output] Characters consumed.
   * @return The parsed value if one was found, null otherwise. If a value was parsed, charactersConsumed will be set to something greater than 0. If no value was parsed, charactersConsumed will be 0.
   */
  static private double parseNumberWithScientificNotationProperlyHandled (String s, final int startPosition, MyInteger charactersConsumed) {
    charactersConsumed._val = 0; // Paranoid.
    ParsePosition pp = new ParsePosition(startPosition);
    Number N = NF.parse(s, pp);
    if ( pp.getIndex()==startPosition )
      // If no number was found, just return immediately.
      return 0;
    assert N instanceof Double || N instanceof Long;
    // Check if the number we just parsed had an 'e' or 'E' in it. So it's scientific already.
    for (int i = startPosition; i < pp.getIndex(); i++) {
      char c = s.charAt(i);
      if ((c == 'e') || (c == 'E')) {
        // We already got a scientific number. Return it.
        charactersConsumed._val = pp.getIndex() - startPosition;
        if( N instanceof Double ) return (Double)N;
        return (double)(Long)N;
      }
    }
    // If we consumed all of str, then just return the value now.
    assert (pp.getIndex() <= s.length());
    if (pp.getIndex() >= s.length()) {
      charactersConsumed._val = pp.getIndex() - startPosition;
      if( N instanceof Double ) return (Double)N;
      return (double)(Long)N;
    }
    // If the lookahead character is not 'e' then just return the value now.
    char lookaheadChar = s.charAt(pp.getIndex());
    if ((lookaheadChar != 'e') && (lookaheadChar != 'E')) {
      charactersConsumed._val = pp.getIndex() - startPosition;
      if( N instanceof Double ) return (Double)N;
      return (double)(Long)N;
    }
    // The lookahead character is 'e'. Find the remaining trailing numbers
    // and attach them to this token.
    // Start with sb as stuff from NF.parse plus the 'e'.
    StringBuilder sb = new StringBuilder();
    sb.append(s.substring(startPosition, Math.min(s.length(),pp.getIndex() + 2)));
    for( int i = pp.getIndex() + 2; i < s.length(); i++ ) {
      char c = s.charAt(i);
      if( c!='+' && c!='-' && !Character.isDigit(c) ) // Only +-digits allowed after that.
        break;
      sb.append(c);
    }
    // Really parse the double now. If we fail here, just bail out and don't
    // consider it a number.
    try {
      double d = Double.valueOf(sb.toString());
      charactersConsumed._val = sb.length(); // Set length consumed before return
      return d;
    } catch (Exception e) { return 0; } // No set length; just return.
  }

  /** Ad-hoc unit tests for the parsing hack above; run with assertions on (java -ea). */
  public static void main (String[] args) {
    // Unit tests for horrible Double.valueOf parsing hack.
    { String s = "fooo1.23e+154"; int i = 4; MyInteger C = new MyInteger(); double d = parseNumberWithScientificNotationProperlyHandled(s, i, C); assert d == 1.23e+154; assert C._val == 9; System.out.println (d); }
    { String s = "fooo1.23e+154blah"; int i = 4; MyInteger C = new MyInteger(); double d = parseNumberWithScientificNotationProperlyHandled(s, i, C); assert d == 1.23e+154; assert C._val == 9; System.out.println (d); }
    { String s = "fooo1.23e14blah"; int i = 4; MyInteger C = new MyInteger(); double d = parseNumberWithScientificNotationProperlyHandled(s, i, C); assert d == 1.23e14; assert C._val == 7; System.out.println (d); }
    { String s = "fooo1.23e"; int i = 4; MyInteger C = new MyInteger(); double d = parseNumberWithScientificNotationProperlyHandled(s, i, C); assert d == 0; assert C._val == 0; System.out.println (d); }
    { String s = "fooo1.23E-10"; int i = 4; MyInteger C = new MyInteger(); double d = parseNumberWithScientificNotationProperlyHandled(s, i, C); assert d == 1.23E-10; assert C._val == 8; System.out.println (d); }
    { String s = "1.23E-10"; int i = 0; MyInteger C = new MyInteger(); double d = parseNumberWithScientificNotationProperlyHandled(s, i, C); assert d == 1.23E-10; assert C._val == 8; System.out.println (d); }
    { String s = "1.23E10E22"; int i = 0; MyInteger C = new MyInteger(); double d = parseNumberWithScientificNotationProperlyHandled(s, i, C); assert d == 1.23E10; assert C._val == 7; System.out.println (d); }
    { String s = "hex[( hex[,c(5)] <= 1.97872258214 ) & ( hex[,c(6)] <= 32.8571773789 ) & ( ( hex[,c(2)] <= 72.2154196079 )) ,]"; int i = 20; MyInteger C = new MyInteger(); double d = parseNumberWithScientificNotationProperlyHandled(s, i, C); assert d == 1.97872258214; assert C._val == 13; System.out.println (d); }
    { String s = "1 "; int i = 0; MyInteger C = new MyInteger(); double d = parseNumberWithScientificNotationProperlyHandled(s, i, C); assert d == 1; assert C._val == 1; System.out.println (d); }
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/exec/ASTFunc.java
package water.exec;

import java.util.ArrayList;
import water.H2O;
import water.Key;
import water.fvec.AppendableVec;
import water.fvec.Frame;
import water.fvec.NewChunk;
import water.fvec.Vec;

/** Parse a generic R string and build an AST, in the context of an H2O Cloud
 *  @author cliffc@0xdata.com
 */
// --------------------------------------------------------------------------
// A user-defined function: declared args plus a body AST.  Each apply point
// captures the then-current environment into _env, making this a closure.
public class ASTFunc extends ASTOp {
  final AST _body;   // Function body AST (shared across shallow clones)
  final int _tmps;   // Count of body-local temps beyond the declared args
  Env _env;          // Captured environment at each apply point
  ASTFunc( String vars[], Type vtypes[], AST body, int tmps ) { super(vars,vtypes,OPF_PREFIX,OPP_PREFIX,OPA_RIGHT); _body = body; _tmps=tmps; }
  ASTFunc( String vars[], Type t, AST body, int tmps ) { super(vars,t,OPF_PREFIX,OPP_PREFIX,OPA_RIGHT); _body = body; _tmps=tmps; }
  @Override String opStr() { return "fun"; }
  //@Override ASTOp make() { throw H2O.fail();}
  @Override ASTOp make() { return new ASTFunc(_vars, _t.copy(), _body, _tmps); }
  // Parse "function(v1,v2,...) body" or return null (rolling the cursor back).
  static ASTOp parseFcn(Exec2 E ) {
    int x = E._x;
    String var = E.isID();
    if( var == null ) return null;
    if( !"function".equals(var) ) { E._x = x; return null; }
    E.xpeek('(',E._x,null);
    ArrayList<ASTId> vars = new ArrayList<ASTId>();
    if( !E.peek(')',false) ) {
      while( true ) {
        x = E._x;
        var = E.isID();
        if( var == null ) E.throwErr("Invalid var",x);
        for( ASTId id : vars ) if( var.equals(id._id) ) E.throwErr("Repeated argument",x);
        // Add unknown-type variable to new vars list
        vars.add(new ASTId(Type.unbound(),var,0,vars.size()));
        if( E.peek(')') ) break;
        E.xpeek(',',E._x,null);
        E.skipWS();
      }
    }
    int argcnt = vars.size();   // Record current size, as body may extend
    // Parse the body
    E._env.push(vars);
    AST body = E.peek('{',false) ? E.xpeek('}',E._x,ASTStatement.parse(E)) : parseCXExpr(E,true);
    if( body == null ) E.throwErr("Missing function body",x);
    E._env.pop();
    // The body should force the types. Build a type signature.
    String xvars[] = new String[argcnt+1];
    Type  types[] = new Type  [argcnt+1];
    xvars[0] = "fun";
    types[0] = body._t;         // Return type of body
    for( int i=0; i<argcnt; i++ ) {
      ASTId id = vars.get(i);
      xvars[i+1] = id._id;
      types[i+1] = id._t;
    }
    return new ASTFunc(xvars,types,body,vars.size()-argcnt);
  }
  @Override void exec(Env env) {
    // We need to push a Closure: the ASTFunc plus captured environment.
    // Make a shallow copy (the body remains shared across all ASTFuncs).
    // Then fill in the current environment.
    ASTFunc fun = (ASTFunc)clone();
    fun._env = env.capture(false);
    env.push(fun);
  }
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    // Open a fresh scope over the args, reserve temps, run the body, then
    // move the result into the caller's result slot and close the scope.
    int res_idx = env.pushScope(argcnt-1);
    env.push(_tmps);
    _body.exec(env);
    env.tos_into_slot(res_idx-1,null);
    env.popScope();
  }
  // Row-wise apply: marshal the double[] through a 1-column frame, invoke the
  // function on it, and unmarshal a scalar or small-vector result into 'out'.
  @Override double[] map(Env env, double[] in, double[] out) {
    final int sp = env._sp;
    Key key = Vec.VectorGroup.VG_LEN1.addVecs(1)[0];
    AppendableVec av = new AppendableVec(key);
    NewChunk nc = new NewChunk(av,0);
    for (double v : in) nc.addNum(v);
    nc.close(0,null);
    Frame fr = new Frame(new String[]{"row"},new Vec[]{av.close(null)});
    env.push(this);
    env.push(fr);
    this.apply(env,2,null);
    if (env.isDbl()) {
      if (out==null || out.length<1) out= new double[1];
      out[0] = env.popDbl();
    } else if (env.isAry()) {
      fr = env.peekAry();
      if (fr.vecs().length > 1) H2O.unimpl();
      Vec vec = fr.anyVec();
      if (vec.length() > 1<<8) H2O.unimpl();  // NOTE(review): small-result limit; larger vectors unimplemented
      if (out==null || out.length<vec.length()) out= new double[(int)vec.length()];
      for (long i = 0; i < vec.length(); i++) out[(int)i] = vec.at(i);
      env.pop();
    } else {
      H2O.unimpl();
    }
    assert sp == env._sp;  // Stack must be balanced on exit
    return out;
  }
  @Override public StringBuilder toString( StringBuilder sb, int d ) {
    indent(sb,d).append(this).append(") {\n");
    _body.toString(sb,d+1).append("\n");
    return indent(sb,d).append("}");
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/exec/ASTOp.java
package water.exec;

import hex.FrameTask.DataInfo;
import hex.Quantiles;
import hex.gram.Gram.GramTask;
import hex.la.Matrix;
import org.apache.commons.math3.util.ArithmeticUtils;
import org.joda.time.DateTime;
import org.joda.time.MutableDateTime;
import org.joda.time.format.DateTimeFormatter;
import water.*;
import water.api.QuantilesPage;
import water.fvec.*;
import water.util.Utils;
import java.math.BigDecimal;
import java.math.MathContext;
import java.math.RoundingMode;
import java.util.*;
import static water.util.Utils.seq;

/** Parse a generic R string and build an AST, in the context of an H2O Cloud
 *  @author cliffc@0xdata.com
 */
// --------------------------------------------------------------------------
// Base class for all operators/functions.  Holds the global operator tables
// (unary-infix, binary-infix, prefix, user-defined) plus the precedence and
// associativity metadata the parser consults.
public abstract class ASTOp extends AST {
  // The order of operator precedence follows R rules.
  // Highest the first
  static final public int OPP_PREFIX = 100; /* abc() */
  static final public int OPP_POWER  = 13;  /* ^ */
  static final public int OPP_UPLUS  = 12;  /* + */
  static final public int OPP_UMINUS = 12;  /* - */
  static final public int OPP_INTDIV = 11;  /* %/% */
  static final public int OPP_MOD    = 11;  /* %xyz% */
  static final public int OPP_MUL    = 10;  /* * */
  static final public int OPP_DIV    = 10;  /* / */
  static final public int OPP_PLUS   = 9;   /* + */
  static final public int OPP_MINUS  = 9;   /* - */
  static final public int OPP_GT     = 8;   /* > */
  static final public int OPP_GE     = 8;   /* >= */
  static final public int OPP_LT     = 8;   /* < */
  static final public int OPP_LE     = 8;   /* <= */
  static final public int OPP_EQ     = 8;   /* == */
  static final public int OPP_NE     = 8;   /* != */
  static final public int OPP_NOT    = 7;   /* ! */
  static final public int OPP_AND    = 6;   /* &, && */
  static final public int OPP_OR     = 5;   /* |, || */
  static final public int OPP_DILDA  = 4;   /* ~ */
  static final public int OPP_RARROW = 3;   /* ->, ->> */
  static final public int OPP_ASSN   = 2;   /* = */
  static final public int OPP_LARROW = 1;   /* <-, <<- */
  // Operator assocation order
  static final public int OPA_LEFT  = 0;
  static final public int OPA_RIGHT = 1;
  // Operation formula notations
  static final public int OPF_INFIX  = 0;
  static final public int OPF_PREFIX = 1;
  // Tables of operators by arity
  static final public HashMap<String,ASTOp> UNI_INFIX_OPS = new HashMap();
  static final public HashMap<String,ASTOp> BIN_INFIX_OPS = new HashMap();
  static final public HashMap<String,ASTOp> PREFIX_OPS    = new HashMap();
  static final public HashMap<String,ASTOp> UDF_OPS       = new HashMap();
  // To avoid a cyclic class-loading dependency, these are init'd before subclasses.
  static final String VARS1[] = new String[]{ "", "x"};
  static final String VARS2[] = new String[]{ "", "x","y"};
  // Register every built-in operator under its opStr() name.
  static {
    // Unary infix ops
    putUniInfix(new ASTUniPlus()); putUniInfix(new ASTUniMinus()); putUniInfix(new ASTNot());
    // Binary infix ops
    putBinInfix(new ASTPlus()); putBinInfix(new ASTSub()); putBinInfix(new ASTMul()); putBinInfix(new ASTDiv());
    putBinInfix(new ASTPow()); putBinInfix(new ASTPow2()); putBinInfix(new ASTMod()); putBinInfix(new ASTMod2());
    putBinInfix(new ASTAND()); putBinInfix(new ASTOR()); putBinInfix(new ASTLT()); putBinInfix(new ASTLE());
    putBinInfix(new ASTGT()); putBinInfix(new ASTGE()); putBinInfix(new ASTEQ()); putBinInfix(new ASTNE());
    putBinInfix(new ASTLA()); putBinInfix(new ASTLO()); putBinInfix(new ASTMMult()); putBinInfix(new ASTIntDiv());
    putBinInfix(new ASTColSeq());
    // Unary prefix ops
    putPrefix(new ASTIsNA()); putPrefix(new ASTNrow()); putPrefix(new ASTNcol()); putPrefix(new ASTLength());
    putPrefix(new ASTAbs ()); putPrefix(new ASTSgn ()); putPrefix(new ASTSqrt()); putPrefix(new ASTCeil());
    putPrefix(new ASTFlr ());
    putPrefix(new ASTTrun()); putPrefix(new ASTRound()); putPrefix(new ASTSignif()); putPrefix(new ASTLog ());
    putPrefix(new ASTExp ()); putPrefix(new ASTScale()); putPrefix(new ASTFactor()); putPrefix(new ASTNumeric());
    putPrefix(new ASTIsFactor());
    putPrefix(new ASTAnyFactor());              // For Runit testing
    putPrefix(new ASTCanBeCoercedToLogical());
    putPrefix(new ASTAnyNA()); putPrefix(new ASTIsTRUE()); putPrefix(new ASTMTrans());
    // Trigonometric functions
    putPrefix(new ASTCos()); putPrefix(new ASTSin()); putPrefix(new ASTTan());
    putPrefix(new ASTACos()); putPrefix(new ASTASin()); putPrefix(new ASTATan());
    putPrefix(new ASTCosh()); putPrefix(new ASTSinh()); putPrefix(new ASTTanh());
    // Time extractions, to and from msec since the Unix Epoch
    putPrefix(new ASTYear ()); putPrefix(new ASTMonth ()); putPrefix(new ASTDay ()); putPrefix(new ASTHour ());
    putPrefix(new ASTMinute()); putPrefix(new ASTSecond()); putPrefix(new ASTMillis()); putPrefix(new ASTasDate());
    // Time series operations
    putPrefix(new ASTDiff ());
    // More generic reducers
    putPrefix(new ASTMin ()); putPrefix(new ASTMax ()); putPrefix(new ASTSum ()); putPrefix(new ASTSdev());
    putPrefix(new ASTVar()); putPrefix(new ASTMean()); putPrefix(new ASTMedian()); putPrefix(new ASTMostCommon());
    putPrefix(new ASTMinNaRm()); putPrefix(new ASTMaxNaRm()); putPrefix(new ASTSumNaRm()); putPrefix(new ASTXorSum ());
    // Misc
    putPrefix(new ASTSeq ()); putPrefix(new ASTSeqLen()); putPrefix(new ASTRepLen()); putPrefix(new ASTQtile ());
    putPrefix(new ASTCat ()); putPrefix(new ASTCbind ()); putPrefix(new ASTTable ()); putPrefix(new ASTReduce());
    putPrefix(new ASTIfElse()); putPrefix(new ASTRApply()); putPrefix(new ASTSApply()); putPrefix(new ASTddply ());
    putPrefix(new ASTUnique()); putPrefix(new ASTRunif ()); putPrefix(new ASTCut ()); putPrefix(new ASTfindInterval());
    putPrefix(new ASTPrint ()); putPrefix(new ASTLs ()); putPrefix(new ASTStrSplit());
    putPrefix(new ASTToLower()); putPrefix(new ASTToUpper()); putPrefix(new ASTGSub());
putPrefix(new ASTStrSub()); putPrefix(new ASTRevalue()); putPrefix(new ASTWhich()); putPrefix(new ASTTrim()); putPrefix(new ASTSample()); } static private boolean isReserved(String fn) { return UNI_INFIX_OPS.containsKey(fn) || BIN_INFIX_OPS.containsKey(fn) || PREFIX_OPS.containsKey(fn); } static private void putUniInfix(ASTOp ast) { UNI_INFIX_OPS.put(ast.opStr(),ast); } static private void putBinInfix(ASTOp ast) { BIN_INFIX_OPS.put(ast.opStr(),ast); } static private void putPrefix (ASTOp ast) { PREFIX_OPS.put(ast.opStr(),ast); } static void putUDF (ASTOp ast, String fn) { if (isReserved(fn)) throw new IllegalArgumentException("Trying to overload a reserved method: "+fn+". Must not overload a reserved method with a user-defined function."); if (UDF_OPS.containsKey(fn)) removeUDF(fn); UDF_OPS.put(fn,ast); } static void removeUDF (String fn) { UDF_OPS.remove(fn); } static public ASTOp isOp(String id) { // This order matters. If used as a prefix OP, `+` and `-` are binary only. ASTOp op4 = UDF_OPS.get(id); if( op4 != null ) return op4; return isBuiltinOp(id); } static public ASTOp isBuiltinOp(String id) { ASTOp op3 = PREFIX_OPS.get(id); if( op3 != null ) return op3; ASTOp op2 = BIN_INFIX_OPS.get(id); if( op2 != null ) return op2; ASTOp op1 = UNI_INFIX_OPS.get(id); return op1; } static public boolean isInfixOp(String id) { return BIN_INFIX_OPS.containsKey(id) || UNI_INFIX_OPS.containsKey(id); } static public boolean isUDF(String id) { return UDF_OPS.containsKey(id); } static public boolean isUDF(ASTOp op) { return isUDF(op.opStr()); } static public Set<String> opStrs() { Set<String> all = UNI_INFIX_OPS.keySet(); all.addAll(BIN_INFIX_OPS.keySet()); all.addAll(PREFIX_OPS.keySet()); all.addAll(UDF_OPS.keySet()); return all; } final int _form; // formula notation, 0 - infix, 1 - prefix final int _precedence; // operator precedence number final int _association; // 0 - left associated, 1 - right associated // All fields are final, because functions are immutable final String 
_vars[]; // Variable names
  ASTOp( String vars[], Type ts[], int form, int prec, int asso) {
    super(Type.fcn(ts));
    _form = form; _precedence = prec; _association = asso;
    _vars = vars;
    assert ts.length==vars.length : "No vars?" + this;
  }
  ASTOp( String vars[], Type t, int form, int prec, int asso) {
    super(t);
    _form = form; _precedence = prec; _association = asso;
    _vars = vars;
    assert t._ts.length==vars.length : "No vars?" + this;
  }
  abstract String opStr();    // Name this op is registered/parsed under
  abstract ASTOp make();      // Fresh (or shared, if stateless) instance for the parser
  public boolean leftAssociate( ) { return _association == OPA_LEFT; }
  @Override public String toString() {
    // Render as "rettype op(argtype argname, ...)".
    String s = _t._ts[0]+" "+opStr()+"(";
    int len=_t._ts.length;
    for( int i=1; i<len-1; i++ )
      s += _t._ts[i]+" "+(_vars==null?"":_vars[i])+", ";
    return s + (len > 1 ? _t._ts[len-1]+" "+(_vars==null?"":_vars[len-1]) : "")+")";
  }
  public String toString(boolean verbose) {
    if( !verbose ) return toString(); // Just the fun name& arg names
    return toString();
  }
  // Parse an operator name, or fall back to parsing a function literal.
  static ASTOp parse(Exec2 E) {
    int x = E._x;
    String id = E.isID();
    if( id == null ) return null;
    ASTOp op = isOp(id);
    // The order matters. If used as a prefix OP, `+` and `-` are binary only.
    // Also, if assigning to a built-in function then do not parse-as-a-fcn.
    // Instead it will default to parsing as an ID in ASTAssign.parse
    if( op != null ) {
      int x1 = E._x;
      if (!E.peek('=') && !(E.peek('<') && E.peek('-'))) {
        E._x = x1;
        return op.make();
      }
    }
    E._x = x;
    return ASTFunc.parseFcn(E);
  }
  // Parse a unary infix OP or return null.
  static ASTOp parseUniInfixOp(Exec2 E) {
    int x = E._x;
    String id = E.isID();
    if( id == null ) return null;
    ASTOp op = UNI_INFIX_OPS.get(id);
    if( op != null) return op.make();
    E._x = x;                   // Roll back, no parse happened
    return null;
  }
  // Parse a binary infix OP or return null.
  static ASTOp parseBinInfixOp(Exec2 E) {
    int x = E._x;
    String id = E.isID();
    if( id == null ) return null;
    ASTOp op = BIN_INFIX_OPS.get(id);
    if( op != null) return op.make();
    E._x = x;                   // Roll back, no parse happened
    return null;
  }
  @Override void exec(Env env) { env.push(this); }
  // Standard column-wise function application
  abstract void apply(Env env, int argcnt, ASTApply apply);
  // Special row-wise 'apply'
  double[] map(Env env, double[] in, double[] out) { throw H2O.unimpl(); }
}

// Base for 1-argument ops over a scalar or a whole Frame (element-wise).
abstract class ASTUniOp extends ASTOp {
  static Type[] newsig() {
    Type t1 = Type.dblary();
    return new Type[]{t1,t1};
  }
  ASTUniOp( int form, int precedence, int association ) {
    super(VARS1,newsig(),form,precedence,association);
  }
  double op( double d ) { throw H2O.fail(); }  // Scalar kernel; subclasses override
  protected ASTUniOp( String[] vars, Type[] types, int form, int precedence, int association ) {
    super(vars,types,form,precedence,association);
  }
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    // Expect we can broadcast across all functions as needed.
if( !env.isAry() ) { env.poppush(op(env.popDbl())); return; }  // Scalar case
    Frame fr = env.popAry();
    String skey = env.key();
    final ASTUniOp uni = this;  // Final 'this' so can use in closure
    Frame fr2 = new MRTask2() {
      @Override public void map( Chunk chks[], NewChunk nchks[] ) {
        for( int i=0; i<nchks.length; i++ ) {
          NewChunk n =nchks[i];
          Chunk c = chks[i];
          int rlen = c._len;
          for( int r=0; r<rlen; r++ )
            n.addNum(uni.op(c.at0(r)));
        }
      }
    }.doAll(fr.numCols(),fr).outputFrame(fr._names, null);
    env.subRef(fr,skey);
    env.pop();                  // Pop self
    env.push(fr2);
  }
}

// Base for prefix-notation unary ops.
abstract class ASTUniPrefixOp extends ASTUniOp {
  ASTUniPrefixOp( ) { super(OPF_PREFIX,OPP_PREFIX,OPA_RIGHT); }
  ASTUniPrefixOp( String[] vars, Type[] types ) { super(vars,types,OPF_PREFIX,OPP_PREFIX,OPA_RIGHT); }
}

// Simple element-wise math wrappers over java.lang.Math.
class ASTCos  extends ASTUniPrefixOp { @Override String opStr(){ return "cos";  } @Override ASTOp make() {return new ASTCos ();} @Override double op(double d) { return Math.cos(d);}}
class ASTSin  extends ASTUniPrefixOp { @Override String opStr(){ return "sin";  } @Override ASTOp make() {return new ASTSin ();} @Override double op(double d) { return Math.sin(d);}}
class ASTTan  extends ASTUniPrefixOp { @Override String opStr(){ return "tan";  } @Override ASTOp make() {return new ASTTan ();} @Override double op(double d) { return Math.tan(d);}}
class ASTACos extends ASTUniPrefixOp { @Override String opStr(){ return "acos"; } @Override ASTOp make() {return new ASTACos();} @Override double op(double d) { return Math.acos(d);}}
class ASTASin extends ASTUniPrefixOp { @Override String opStr(){ return "asin"; } @Override ASTOp make() {return new ASTASin();} @Override double op(double d) { return Math.asin(d);}}
class ASTATan extends ASTUniPrefixOp { @Override String opStr(){ return "atan"; } @Override ASTOp make() {return new ASTATan();} @Override double op(double d) { return Math.atan(d);}}
class ASTCosh extends ASTUniPrefixOp { @Override String opStr(){ return "cosh"; } @Override ASTOp make() {return new ASTCosh ();} @Override double op(double d) { return Math.cosh(d);}}
class ASTSinh extends ASTUniPrefixOp { @Override String opStr(){ return "sinh"; } @Override ASTOp make() {return new ASTSinh ();} @Override double op(double d) { return Math.sinh(d);}}
class ASTTanh extends ASTUniPrefixOp { @Override String opStr(){ return "tanh"; } @Override ASTOp make() {return new ASTTanh ();} @Override double op(double d) { return Math.tanh(d);}}
class ASTAbs  extends ASTUniPrefixOp { @Override String opStr(){ return "abs";  } @Override ASTOp make() {return new ASTAbs ();} @Override double op(double d) { return Math.abs(d);}}
class ASTSgn  extends ASTUniPrefixOp { @Override String opStr(){ return "sgn" ; } @Override ASTOp make() {return new ASTSgn ();} @Override double op(double d) { return Math.signum(d);}}
class ASTSqrt extends ASTUniPrefixOp { @Override String opStr(){ return "sqrt"; } @Override ASTOp make() {return new ASTSqrt();} @Override double op(double d) { return Math.sqrt(d);}}
class ASTCeil extends ASTUniPrefixOp { @Override String opStr(){ return "ceil"; } @Override ASTOp make() {return new ASTCeil();} @Override double op(double d) { return Math.ceil(d);}}
class ASTFlr  extends ASTUniPrefixOp { @Override String opStr(){ return "floor";} @Override ASTOp make() {return new ASTFlr ();} @Override double op(double d) { return Math.floor(d);}}
class ASTTrun extends ASTUniPrefixOp { @Override String opStr(){ return "trunc";} @Override ASTOp make() {return new ASTTrun();} @Override double op(double d) { return d>=0?Math.floor(d):Math.ceil(d);}}
class ASTLog  extends ASTUniPrefixOp { @Override String opStr(){ return "log";  } @Override ASTOp make() {return new ASTLog ();} @Override double op(double d) { return Math.log(d);}}
class ASTExp  extends ASTUniPrefixOp { @Override String opStr(){ return "exp";  } @Override ASTOp make() {return new ASTExp ();} @Override double op(double d) { return Math.exp(d);}}
//class ASTIsNA extends ASTUniPrefixOp { @Override String opStr(){ return "is.na"; } @Override ASTOp make() {return new ASTIsNA();} @Override double op(double d) { return Double.isNaN(d)?1:0;}}

// is.na: like the simple version above, but additionally treats an enum level
// whose domain string is literally "NA" as missing.
class ASTIsNA extends ASTUniPrefixOp {
  @Override String opStr(){ return "is.na";}
  @Override ASTOp make() { return new ASTIsNA();}
  @Override double op(double d) { return Double.isNaN(d)?1:0;}
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    // Expect we can broadcast across all functions as needed.
    if( !env.isAry() ) { env.poppush(op(env.popDbl())); return; }
    Frame fr = env.popAry();
    String skey = env.key();
    final ASTUniOp uni = this;  // Final 'this' so can use in closure
    Frame fr2 = new MRTask2() {
      @Override public void map( Chunk chks[], NewChunk nchks[] ) {
        for( int i=0; i<nchks.length; i++ ) {
          NewChunk n = nchks[i];
          Chunk c = chks[i];
          int rlen = c._len;
          for( int r=0; r<rlen; r++ )
            n.addNum( ( c.isNA0(r) || isNA0(c, r)) ? 1 : 0);
        }
      }
    }.doAll(fr.numCols(),fr).outputFrame(fr._names, null);
    env.subRef(fr,skey);
    env.pop();                  // Pop self
    env.push(fr2);
  }
  // True when an enum cell's domain label is the literal string "NA".
  private boolean isNA0(Chunk c, int row0) {
    if (c._vec.isEnum()) {
      if (c._vec.domain()[(int) c.at0(row0)].equals("NA")) return true;
    }
    return false;
  }
}

// which: 1-based row indices of cells equal to 1 in a single-column frame.
class ASTWhich extends ASTOp {
  ASTWhich() { super(new String[]{"which", "x"}, new Type[]{Type.dblary(), Type.dblary()}, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT);}
  @Override String opStr() { return "which"; }
  @Override ASTOp make() { return new ASTWhich(); }
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    if(env.isAry()) {
      Frame fr = env.popAry();
      if (fr.numCols() != 1) throw new IllegalArgumentException("`which` accepts at exactly 1 column!");
      String skey = env.key();
      Frame fr2 = new MRTask2() {
        @Override public void map(Chunk chk, NewChunk nchk) {
          for (int r = 0; r < chk._len; ++r)
            if (chk.at0(r) == 1) nchk.addNum(chk._start + r + 1);  // emit 1-based row number
        }
      }.doAll(1,fr).outputFrame(new String[]{"which"},null);
      env.subRef(fr,skey);
      env.pop();                // Pop self
      env.push(fr2);
    }
  }
}

// round(x, digits): half-even rounding to a number of decimal places.
class ASTRound extends ASTOp {
  @Override String opStr() { return "round"; }
  ASTRound() { super(new
String[]{"round", "x", "digits"},
        new Type[]{Type.dblary(), Type.dblary(), Type.DBL}, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); }
  @Override ASTOp make() { return this; }
  // Round a scalar or every numeric cell of a frame to `digits` decimal places.
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    final int digits = (int)env.popDbl();
    if(env.isAry()) {
      Frame fr = env.popAry();
      // Rounding is undefined for factor columns; reject them up front.
      for(int i = 0; i < fr.vecs().length; i++) {
        if(fr.vecs()[i].isEnum())
          throw new IllegalArgumentException("Non-numeric column " + String.valueOf(i+1) + " in data frame");
      }
      String skey = env.key();
      Frame fr2 = new MRTask2() {
        @Override public void map(Chunk chks[], NewChunk nchks[]) {
          for(int i = 0; i < nchks.length; i++) {
            NewChunk n = nchks[i]; Chunk c = chks[i]; int rlen = c._len;
            for(int r = 0; r < rlen; r++) n.addNum(roundDigits(c.at0(r),digits));
          }
        }
      }.doAll(fr.numCols(),fr).outputFrame(fr.names(),fr.domains());
      env.subRef(fr,skey);
      env.pop(); // Pop self
      env.push(fr2);
    } else env.poppush(roundDigits(env.popDbl(),digits));
  }
  // HALF_EVEN ("banker's") rounding at `digits` decimal places; NaN passes through.
  static double roundDigits(double x, int digits) {
    if(Double.isNaN(x)) return x;
    BigDecimal bd = new BigDecimal(x);
    bd = bd.setScale(digits, RoundingMode.HALF_EVEN);
    return bd.doubleValue();
  }
}

// R-style signif(): keep `digits` significant digits of a scalar or every numeric cell.
class ASTSignif extends ASTOp {
  @Override String opStr() { return "signif"; }
  ASTSignif() { super(new String[]{"signif", "x", "digits"},
          new Type[]{Type.dblary(), Type.dblary(), Type.DBL}, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); }
  @Override ASTOp make() { return this; }
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    final int digits = (int)env.popDbl();
    if(digits < 0)
      throw new IllegalArgumentException("Error in signif: argument digits must be a non-negative integer");
    if(env.isAry()) {
      Frame fr = env.popAry();
      for(int i = 0; i < fr.vecs().length; i++) {
        if(fr.vecs()[i].isEnum())
          throw new IllegalArgumentException("Non-numeric column " + String.valueOf(i+1) + " in data frame");
      }
      String skey = env.key();
      Frame fr2 = new MRTask2() {
        @Override public void map(Chunk chks[], NewChunk nchks[]) {
          for(int i = 0; i < nchks.length; i++) {
            NewChunk n = nchks[i]; Chunk c = chks[i]; int rlen = c._len;
            for(int r = 0; r < rlen; r++) n.addNum(signifDigits(c.at0(r),digits));
          }
        }
      }.doAll(fr.numCols(),fr).outputFrame(fr.names(),fr.domains());
      env.subRef(fr,skey);
      env.pop(); // Pop self
      env.push(fr2);
    } else env.poppush(signifDigits(env.popDbl(),digits));
  }
  // Round to `digits` significant digits via BigDecimal/MathContext; NaN passes through.
  static double signifDigits(double x, int digits) {
    if(Double.isNaN(x)) return x;
    BigDecimal bd = new BigDecimal(x);
    bd = bd.round(new MathContext(digits, RoundingMode.HALF_EVEN));
    return bd.doubleValue();
  }
}

// nrow(x): number of rows of a frame, as a scalar.
class ASTNrow extends ASTUniPrefixOp {
  ASTNrow() { super(VARS1,new Type[]{Type.DBL,Type.ARY}); }
  @Override String opStr() { return "nrow"; }
  @Override ASTOp make() {return this;}
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    Frame fr = env.popAry(); String skey = env.key();
    double d = fr.numRows();
    env.subRef(fr,skey);
    env.poppush(d);
  }
}

// ncol(x): number of columns of a frame, as a scalar.
class ASTNcol extends ASTUniPrefixOp {
  ASTNcol() { super(VARS1,new Type[]{Type.DBL,Type.ARY}); }
  @Override String opStr() { return "ncol"; }
  @Override ASTOp make() {return this;}
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    Frame fr = env.popAry(); String skey = env.key();
    double d = fr.numCols();
    env.subRef(fr,skey);
    env.poppush(d);
  }
}

// length(x): R semantics — row count for a single-column frame, otherwise column count.
class ASTLength extends ASTUniPrefixOp {
  ASTLength() { super(VARS1, new Type[]{Type.DBL,Type.ARY}); }
  @Override String opStr() { return "length"; }
  @Override ASTOp make() { return this; }
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    Frame fr = env.popAry(); String skey = env.key();
    double d = fr.numCols() == 1 ? fr.numRows() : fr.numCols();
    env.subRef(fr,skey);
    env.poppush(d);
  }
}

// is.factor(x): 1 iff EVERY column is a factor (enum); a scalar argument yields 0.
class ASTIsFactor extends ASTUniPrefixOp {
  ASTIsFactor() { super(VARS1,new Type[]{Type.DBL,Type.ARY}); }
  @Override String opStr() { return "is.factor"; }
  @Override ASTOp make() {return this;}
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    if(!env.isAry()) { env.poppush(0); return; }
    Frame fr = env.popAry(); String skey = env.key();
    double d = 1;
    Vec[] v = fr.vecs();
    for(int i = 0; i < v.length; i++) { if(!v[i].isEnum()) { d = 0; break; } }
    env.subRef(fr,skey);
    env.poppush(d);
  }
}

// Added to facilitate Runit testing
// any.factor(x): 1 iff AT LEAST ONE column is a factor (enum).
class ASTAnyFactor extends ASTUniPrefixOp {
  ASTAnyFactor() { super(VARS1,new Type[]{Type.DBL,Type.ARY}); }
  @Override String opStr() { return "any.factor"; }
  @Override ASTOp make() {return this;}
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    if(!env.isAry()) { env.poppush(0); return; }
    Frame fr = env.popAry(); String skey = env.key();
    double d = 0;
    Vec[] v = fr.vecs();
    for(int i = 0; i < v.length; i++) { if(v[i].isEnum()) { d = 1; break; } }
    env.subRef(fr,skey);
    env.poppush(d);
  }
}

// canBeCoercedToLogical(x): 1 iff some integer column takes only values in {0,1}
// (covers mixed 0/1, all-0, and all-1 columns).
class ASTCanBeCoercedToLogical extends ASTUniPrefixOp {
  ASTCanBeCoercedToLogical() { super(VARS1,new Type[]{Type.DBL,Type.ARY}); }
  @Override String opStr() { return "canBeCoercedToLogical"; }
  @Override ASTOp make() {return this;}
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    if(!env.isAry()) { env.poppush(0); return; }
    Frame fr = env.popAry(); String skey = env.key();
    double d = 0;
    Vec[] v = fr.vecs();
    for (Vec aV : v) {
      if (aV.isInt()) {
        if ((aV.min() == 0 && aV.max() == 1)
            || (aV.min() == 0 && aV.min() == aV.max())
            || (aV.min() == 1 && aV.min() == aV.max())) { d = 1; break; }
      }
    }
    env.subRef(fr,skey);
    env.poppush(d);
  }
}

// any.na(x): 1 iff any column contains at least one NA.
class ASTAnyNA extends ASTUniPrefixOp {
  ASTAnyNA() { super(VARS1,new Type[]{Type.DBL,Type.ARY}); }
  @Override String opStr() { return "any.na"; }
  @Override ASTOp make() {return this;}
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    if(!env.isAry()) { env.poppush(0); return; }
    Frame fr = env.popAry(); String skey = env.key();
    double d = 0;
    Vec[] v = fr.vecs();
    for(int i = 0; i < v.length; i++) { if(v[i].naCnt() > 0) { d = 1; break; } }
    env.subRef(fr, skey);
    env.poppush(d);
  }
}

// isTRUE(x): 1 iff the argument is the scalar 1.0; anything else (incl. frames) gives 0.
class ASTIsTRUE extends ASTUniPrefixOp {
  ASTIsTRUE() {super(VARS1,new Type[]{Type.DBL,Type.unbound()});}
  @Override String opStr() { return "isTRUE"; }
  @Override ASTOp make() {return new ASTIsTRUE();} // to make sure fcn get bound at each new context
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    double res = env.isDbl() && env.popDbl()==1.0 ? 1:0;
    env.pop();
    env.poppush(res);
  }
}

// scale(x): center numeric columns by mean and scale by 1/sigma; enum columns pass through.
class ASTScale extends ASTUniPrefixOp {
  ASTScale() { super(VARS1,new Type[]{Type.ARY,Type.ARY}); }
  @Override String opStr() { return "scale"; }
  @Override ASTOp make() {return this;}
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    if(!env.isAry()) { env.poppush(Double.NaN); return; }
    Frame fr = env.popAry(); String skey = env.key();
    Frame fr2 = new Scale().doIt(fr.numCols(), fr).outputFrame(fr._names, fr.domains());
    env.subRef(fr,skey);
    env.pop(); // Pop self
    env.push(fr2);
  }
  private static class Scale extends MRTask2<Scale> {
    protected int _nums = 0;       // number of numeric (non-enum) columns
    protected int[] _ind;          // Saves indices of numeric cols first, followed by enums
    protected double[] _normSub;   // per-numeric-column mean to subtract
    protected double[] _normMul;   // per-numeric-column 1/sigma to multiply
    @Override public void map(Chunk chks[], NewChunk nchks[]) {
      // Normalize numeric cols only
      // NOTE(review): _normSub/_normMul are allocated with length _nums but indexed here
      // by the ORIGINAL column index i = _ind[k]; dfork2 fills them by compact index.
      // This looks inconsistent when enum columns precede numeric ones — verify.
      for(int k = 0; k < _nums; k++) {
        int i = _ind[k];
        NewChunk n = nchks[i]; Chunk c = chks[i]; int rlen = c._len;
        for(int r = 0; r < rlen; r++) n.addNum((c.at0(r)-_normSub[i])*_normMul[i]);
      }
      // Enum columns are copied through unchanged.
      for(int k = _nums; k < chks.length; k++) {
        int i = _ind[k];
        NewChunk n = nchks[i]; Chunk c = chks[i]; int rlen = c._len;
        for(int r = 0; r < rlen; r++) n.addNum(c.at0(r));
      }
    }
    public Scale doIt(int outputs, Frame fr) { return dfork2(outputs, fr).getResult(); }
    // Precompute per-column mean/sigma, then fork the distributed pass.
    public Scale dfork2(int outputs, Frame fr) {
      final Vec [] vecs = fr.vecs();
      for(int i = 0; i < vecs.length;
          i++) { if(!vecs[i].isEnum()) _nums++; }
      if(_normSub == null) _normSub = MemoryManager.malloc8d(_nums);
      if(_normMul == null) { _normMul = MemoryManager.malloc8d(_nums); Arrays.fill(_normMul,1); }
      if(_ind == null) _ind = MemoryManager.malloc4(vecs.length);
      int ncnt = 0; int ccnt = 0;
      for(int i = 0; i < vecs.length; i++){
        if(!vecs[i].isEnum()) {
          // Numeric column: record mean and 1/sigma at the compact index ncnt.
          _normSub[ncnt] = vecs[i].mean();
          _normMul[ncnt] = 1.0/vecs[i].sigma();
          _ind[ncnt++] = i;
        } else _ind[_nums+(ccnt++)] = i;   // Enum columns are listed after all numeric ones.
      }
      assert ncnt == _nums && (ncnt + ccnt == vecs.length);
      return dfork(outputs, fr, false);
    }
  }
}

// ----
// Prefix ops extracting one calendar field (year, month, ...) from epoch-millis values.
abstract class ASTTimeOp extends ASTOp {
  static Type[] newsig() { Type t1 = Type.dblary(); return new Type[]{t1,t1}; }
  ASTTimeOp() { super(VARS1,newsig(),OPF_PREFIX,OPP_PREFIX,OPA_RIGHT); }
  // Extract the field from a timestamp held in the (mutable, reused) MutableDateTime.
  abstract long op( MutableDateTime dt );
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    // Single instance of MDT for the single call
    if( !env.isAry() ) {        // Single point
      double d = env.popDbl();
      if( !Double.isNaN(d) ) d = op(new MutableDateTime((long)d));
      env.poppush(d);
      return;
    }
    // Whole column call
    Frame fr = env.popAry();
    String skey = env.key();
    final ASTTimeOp uni = this;  // Final 'this' so can use in closure
    Frame fr2 = new MRTask2() {
      @Override public void map( Chunk chks[], NewChunk nchks[] ) {
        MutableDateTime dt = new MutableDateTime(0);  // one reusable MDT per map call
        for( int i=0; i<nchks.length; i++ ) {
          NewChunk n =nchks[i];
          Chunk c = chks[i];
          int rlen = c._len;
          for( int r=0; r<rlen; r++ ) {
            double d = c.at0(r);
            // NaN (NA) rows pass through unchanged.
            if( !Double.isNaN(d) ) { dt.setMillis((long)d); d = uni.op(dt); }
            n.addNum(d);
          }
        }
      }
    }.doAll(fr.numCols(),fr).outputFrame(fr._names, null);
    env.subRef(fr,skey);
    env.pop(); // Pop self
    env.push(fr2);
  }
}
class ASTYear   extends ASTTimeOp { @Override String opStr(){ return "year" ; } @Override ASTOp make() {return new ASTYear  ();} @Override long op(MutableDateTime dt) { return dt.getYear();}}
// month is 0-based (January == 0), unlike Joda's 1-based getMonthOfYear().
class ASTMonth  extends ASTTimeOp { @Override String opStr(){ return "month"; } @Override ASTOp make() {return new ASTMonth ();} @Override long op(MutableDateTime dt) { return dt.getMonthOfYear()-1;}}
class ASTDay    extends ASTTimeOp { @Override String opStr(){ return "day" ; } @Override ASTOp make() {return new ASTDay   ();} @Override long op(MutableDateTime dt) { return dt.getDayOfMonth();}}
class ASTHour   extends ASTTimeOp { @Override String opStr(){ return "hour" ; } @Override ASTOp make() {return new ASTHour  ();} @Override long op(MutableDateTime dt) { return dt.getHourOfDay();}}
class ASTMinute extends ASTTimeOp { @Override String opStr(){return "minute";} @Override ASTOp make() {return new ASTMinute();} @Override long op(MutableDateTime dt) { return dt.getMinuteOfHour();}}
class ASTSecond extends ASTTimeOp { @Override String opStr(){return "second";} @Override ASTOp make() {return new ASTSecond();} @Override long op(MutableDateTime dt) { return dt.getSecondOfMinute();}}
class ASTMillis extends ASTTimeOp { @Override String opStr(){return "millis";} @Override ASTOp make() {return new ASTMillis();} @Override long op(MutableDateTime dt) { return dt.getMillisOfSecond();}}

// as.Date(x, format): parse a factor column of date strings into epoch-millis values.
class ASTasDate extends ASTOp {
  ASTasDate() { super(new String[]{"as.Date", "x", "format"},
          new Type[]{Type.ARY, Type.ARY, Type.STR}, OPF_PREFIX, OPP_PREFIX,OPA_RIGHT); }
  @Override String opStr() { return "as.Date"; }
  @Override ASTOp make() {return new ASTasDate();}
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    final String format = env.popStr();
    if (format.isEmpty()) throw new IllegalArgumentException("as.Date requires a non-empty format string");
    // check the format string more?
Frame fr = env.ary(-1); if( fr.vecs().length != 1 || !fr.vecs()[0].isEnum() ) throw new IllegalArgumentException("as.Date requires a single column of factors"); Frame fr2 = new MRTask2() { @Override public void map( Chunk chks[], NewChunk nchks[] ) { //done on each node in lieu of rewriting DateTimeFormatter as Iced DateTimeFormatter dtf = ParseTime.forStrptimePattern(format); for( int i=0; i<nchks.length; i++ ) { NewChunk n =nchks[i]; Chunk c = chks[i]; int rlen = c._len; for( int r=0; r<rlen; r++ ) { if (!c.isNA0(r)) { String date = c._vec.domain((long)c.at0(r)); n.addNum(DateTime.parse(date, dtf).getMillis(), 0); } else n.addNA(); } } } }.doAll(fr.numCols(),fr).outputFrame(fr._names, null); env.poppush(2, fr2, null); } } class ASTStrSplit extends ASTOp { ASTStrSplit() { super(new String[]{"strsplit", "x", "split"}, new Type[]{Type.ARY, Type.ARY, Type.STR}, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); } @Override String opStr() { return "strsplit"; } @Override ASTOp make() { return new ASTStrSplit(); } @Override void apply(Env env, int argcnt, ASTApply apply) { String split = env.popStr(); Frame fr = env.ary(-1); if (fr.numCols() != 1) throw new IllegalArgumentException("strsplit requires a single column."); split = split.isEmpty() ? 
"" : split; final String[] old_domains = fr.anyVec().domain(); final String[][] new_domains = newDomains(old_domains, split); final String[] col_names = new String[new_domains.length]; for (int i = 1; i <= col_names.length; ++i) col_names[i-1] = "C"+i; final String regex = split; Frame fr2 = new MRTask2() { @Override public void map(Chunk[] cs, NewChunk[] ncs) { Chunk c = cs[0]; for (int i = 0; i < c._len; ++i) { int idx = (int)c.at0(i); String s = old_domains[idx]; String[] ss = s.split(regex); int cnt = 0; for (int j = 0; j < ss.length; ++j) { int n_idx = Arrays.asList(new_domains[cnt]).indexOf(ss[j]); if (n_idx == -1) ncs[cnt++].addNA(); else ncs[cnt++].addNum(n_idx); } if (cnt < ncs.length) for (; cnt < ncs.length; ++cnt) ncs[cnt].addNA(); } } }.doAll(col_names.length, fr).outputFrame(col_names, new_domains); env.poppush(2, fr2, null); } private String[][] newDomains(String[] domains, String regex) { ArrayList<HashSet<String>> strs = new ArrayList<HashSet<String>>(); for (String domain : domains) { String[] news = domain.split(regex); for (int i = 0; i < news.length; ++i) { if (strs.size() == i) { HashSet<String> x = new HashSet<String>(); x.add(news[i]); strs.add(x); } else { HashSet<String> x = strs.get(i); x.add(news[i]); strs.set(i, x); } } } String[][] doms = new String[strs.size()][]; for (int i = 0; i < strs.size(); ++i) { HashSet<String> x = strs.get(i); doms[i] = new String[x.size()]; for (int j = 0; j < x.size(); ++j) doms[i][j] = (String)x.toArray()[j]; } return doms; } } class ASTToLower extends ASTUniPrefixOp { @Override String opStr() { return "tolower"; } @Override ASTOp make() { return new ASTToLower(); } @Override void apply(Env env, int argcnt, ASTApply apply) { if( !env.isAry() ) { throw new IllegalArgumentException("tolower only operates on a single vector!"); } Frame fr = env.popAry(); if (fr.numCols() != 1) throw new IllegalArgumentException("tolower only takes a single column of data. 
Got "+ fr.numCols()+" columns."); String skey = env.key(); String[] new_dom = fr.anyVec().domain().clone(); for (int i = 0; i < new_dom.length; ++i) new_dom[i] = new_dom[i].toLowerCase(Locale.ENGLISH); Frame fr2 = new Frame(fr._names, fr.vecs()); fr2.anyVec()._domain = new_dom; env.subRef(fr,skey); env.pop(); env.push(fr2); } } class ASTToUpper extends ASTUniPrefixOp { @Override String opStr() { return "toupper"; } @Override ASTOp make() { return new ASTToUpper(); } @Override void apply(Env env, int argcnt, ASTApply apply) { if( !env.isAry() ) { throw new IllegalArgumentException("toupper only operates on a single vector!"); } Frame fr = env.popAry(); if (fr.numCols() != 1) throw new IllegalArgumentException("toupper only takes a single column of data. Got "+ fr.numCols()+" columns."); String skey = env.key(); String[] new_dom = fr.anyVec().domain().clone(); for (int i = 0; i < new_dom.length; ++i) new_dom[i] = new_dom[i].toUpperCase(Locale.ENGLISH); Frame fr2 = new Frame(fr._names, fr.vecs()); fr2.anyVec()._domain = new_dom; env.subRef(fr,skey); env.pop(); env.push(fr2); } } class ASTRevalue extends ASTOp { ASTRevalue(){ super(new String[]{"revalue", "x", "replace", "warn_missing"}, new Type[]{Type.ARY, Type.ARY, Type.STR, Type.DBL}, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); } @Override String opStr() { return "revalue"; } @Override ASTOp make() { return new ASTRevalue(); } @Override void apply(Env env, int argcnt, ASTApply apply) { final boolean warn_missing = env.popDbl() == 1; final String replace = env.popStr(); String skey = env.key(); Frame fr = env.popAry(); if (fr.numCols() != 1) throw new IllegalArgumentException("revalue works on a single column at a time."); String[] old_dom = fr.anyVec()._domain; if (old_dom == null) throw new IllegalArgumentException("Column is not a factor column. 
Can only revalue a factor column."); HashMap<String, String> dom_map = hashMap(replace); for (int i = 0; i < old_dom.length; ++i) { if (dom_map.containsKey(old_dom[i])) { old_dom[i] = dom_map.get(old_dom[i]); dom_map.remove(old_dom[i]); } } if (dom_map.size() > 0 && warn_missing) { for (String k : dom_map.keySet()) { env._warnings = Arrays.copyOf(env._warnings, env._warnings.length + 1); env._warnings[env._warnings.length - 1] = "Warning: old value " + k + " not a factor level."; } } } private HashMap<String, String> hashMap(String replace) { HashMap<String, String> map = new HashMap<String, String>(); //replace is a ';' separated string. Each piece after splitting is a key:value pair. String[] maps = replace.split(";"); for (String s : maps) { String[] pair = s.split(":"); String key = pair[0]; String value = pair[1]; map.put(key, value); } return map; } } class ASTGSub extends ASTOp { ASTGSub() { super(new String[]{"gsub", "pattern", "replacement", "x", "ignore.case"}, new Type[]{Type.ARY, Type.STR, Type.STR, Type.ARY, Type.DBL}, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); } @Override String opStr() { return "gsub"; } @Override ASTOp make() { return new ASTGSub(); } @Override void apply(Env env, int argcnt, ASTApply apply) { final boolean ignore_case = env.popDbl() == 1; String skey = env.key(); Frame fr = env.popAry(); if (fr.numCols() != 1) throw new IllegalArgumentException("gsub works on a single column at a time."); final String replacement = env.popStr(); final String pattern = env.popStr(); String[] doms = fr.anyVec().domain().clone(); for (int i = 0; i < doms.length; ++i) doms[i] = ignore_case ? 
      doms[i].toLowerCase(Locale.ENGLISH).replaceAll(pattern, replacement) : doms[i].replaceAll(pattern, replacement);
    Frame fr2 = new Frame(fr.names(), fr.vecs());
    fr2.anyVec()._domain = doms;
    env.subRef(fr, skey);
    env.poppush(1, fr2, null);
  }
}

// trim(x): strip leading/trailing whitespace from every factor level of a single column.
class ASTTrim extends ASTOp {
  ASTTrim() { super(new String[]{"trim","x"},
          new Type[]{Type.dblary(), Type.dblary()}, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); }
  @Override String opStr() { return "trim"; }
  @Override ASTOp make() { return new ASTTrim(); }
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    String skey = env.key();
    Frame fr = env.popAry();
    if (fr.numCols() != 1) throw new IllegalArgumentException("trim works on a single column at a time.");
    String[] doms = fr.anyVec().domain().clone();
    for (int i = 0; i < doms.length; ++i) doms[i] = doms[i].trim();
    Frame fr2 = new Frame(fr.names(), fr.vecs());
    // NOTE(review): fr2 shares its Vec with fr, so this replaces the shared domain — verify intended.
    fr2.anyVec()._domain = doms;
    env.subRef(fr, skey);
    env.poppush(1, fr2, null);
  }
}

//FIXME: Create new chunks that overlay the frame to avoid ragged chunk issue
// sample(ary, nobs, seed): draw ~nobs rows without replacement. The quota is spread
// evenly across chunks; any remainder ("defecit") is handed out to randomly chosen
// chunks that still have room. seed == -1 means seed from the wall clock.
class ASTSample extends ASTOp {
  ASTSample() { super(new String[]{"sample", "ary", "nobs", "seed"},
          new Type[]{Type.ARY, Type.ARY, Type.DBL, Type.DBL}, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); }
  @Override String opStr() { return "sample"; }
  @Override ASTOp make() { return new ASTSample(); }
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    final double seed = env.popDbl();
    final double nobs = env.popDbl();
    String skey = env.key();
    Frame fr = env.popAry();
    long[] espc = fr.anyVec()._espc;          // chunk start offsets
    long[] chk_sizes = new long[espc.length]; // rows per chunk
    final long[] css = new long[espc.length]; // rows to sample per chunk
    for (int i = 0; i < espc.length-1; ++i) chk_sizes[i] = espc[i+1] - espc[i];
    chk_sizes[chk_sizes.length-1] = fr.numRows() - espc[espc.length-1];
    long per_chunk_sample = (long) Math.floor(nobs / (double)espc.length);
    long defecit = (long) (nobs - per_chunk_sample*espc.length) ;
    // idxs is an array list of chunk indexes for adding to the sample size. Chunks with no defecit can not be "sampled" as candidates.
    ArrayList<Integer> idxs = new ArrayList<Integer>();
    for (int i = 0; i < css.length; ++i) {
      // get the max allowed rows to sample from the chunk
      css[i] = Math.min(per_chunk_sample, chk_sizes[i]);
      // if per_chunk_sample > css[i] => spread around the defecit to meet number of rows requirement.
      long def = per_chunk_sample - css[i];
      // no more "room" in chunk `i`
      if (def >= 0) { defecit += def;
        // else `i` has "room"
      }
      if (chk_sizes[i] > per_chunk_sample) idxs.add(i);
    }
    if (defecit > 0) {
      Random rng = new Random(seed != -1 ? (long)seed : System.currentTimeMillis());
      while (defecit > 0) {
        if (idxs.size() <= 0) break;
        // select chunks at random and add to the number of rows they should sample,
        // up to the number of rows in the chunk.
        int rand = rng.nextInt(idxs.size());
        if (css[idxs.get(rand)] == chk_sizes[idxs.get(rand)]) { idxs.remove(rand); continue; }
        css[idxs.get(rand)]++;
        defecit--;
      }
    }
    Frame fr2 = new MRTask2() {
      @Override public void map(Chunk[] chks, NewChunk[] nchks) {
        // Selection sampling within the chunk: pick n of the N local rows in order.
        int N = chks[0]._len;
        int m = 0;
        long n = css[chks[0].cidx()];
        int row = 0;
        Random rng = new Random(seed != -1 ? (long)seed : System.currentTimeMillis());
        while( m < n) {
          double u = rng.nextDouble();
          // Keep row with probability (n-m)/(N-row).
          if ( (N - row)* u >= (n - m)) { row++; }
          else {
            for (int i = 0; i < chks.length; ++i) nchks[i].addNum(chks[i].at0(row));
            row++;
            m++;
          }
        }
      }
    }.doAll(fr.numCols(), fr).outputFrame(fr.names(), fr.domains());
    env.subRef(fr, skey);
    env.poppush(1, fr2, null);
  }
}

// sub(pattern, replacement, x, ignore.case): like gsub but replaces only the FIRST match
// in each factor level.
class ASTStrSub extends ASTOp {
  ASTStrSub() { super(new String[]{"sub", "pattern", "replacement", "x", "ignore.case"},
          new Type[]{Type.ARY, Type.STR, Type.STR, Type.ARY, Type.DBL}, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); }
  @Override String opStr() { return "sub"; }
  @Override ASTOp make() { return new ASTStrSub(); }
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    final boolean ignore_case = env.popDbl() == 1;
    String skey = env.key();
    Frame fr = env.popAry();
    if (fr.numCols() != 1) throw new IllegalArgumentException("sub works on a single column at a time.");
    final String replacement = env.popStr();
    final String pattern = env.popStr();
    String[] doms = fr.anyVec().domain().clone();
    for (int i = 0; i < doms.length; ++i)
      doms[i] = ignore_case ? doms[i].toLowerCase(Locale.ENGLISH).replaceFirst(pattern, replacement) : doms[i].replaceFirst(pattern, replacement);
    Frame fr2 = new Frame(fr.names(), fr.vecs());
    fr2.anyVec()._domain = doms;
    env.subRef(fr, skey);
    env.poppush(1, fr2, null);
  }
}

// Finite backward difference for user-specified lag
// http://en.wikipedia.org/wiki/Finite_difference
// diff(x, lag, differences): n-th order backward difference with step `lag` on a single
// numeric column.
class ASTDiff extends ASTOp {
  ASTDiff() { super(new String[]{"diff", "x", "lag", "differences"},
          new Type[]{Type.ARY, Type.ARY, Type.DBL, Type.DBL}, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); }
  @Override String opStr() { return "diff"; }
  @Override ASTOp make() {return new ASTDiff();}
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    final int diffs = (int)env.popDbl();
    if(diffs < 0) throw new IllegalArgumentException("differences must be an integer >= 1");
    final int lag = (int)env.popDbl();
    if(lag < 0) throw new IllegalArgumentException("lag must be an integer >= 1");
    Frame fr = env.popAry();
    String skey = env.key();
    if(fr.vecs().length != 1 || fr.vecs()[0].isEnum())
      throw new IllegalArgumentException("diff takes a single numeric column vector");
    Frame fr2 = new MRTask2() {
      @Override public void map(Chunk chk, NewChunk nchk) {
        // The first diffs*lag rows of the whole column have no backward history; skip them.
        int rstart = (int)(diffs*lag - chk._start);
        if(rstart > chk._len) return;
        rstart = Math.max(0, rstart);
        // Formula: \Delta_h^n x_t = \sum_{i=0}^n (-1)^i*\binom{n}{k}*x_{t-i*h}
        for(int r = rstart; r < chk._len; r++) {
          double x = chk.at0(r);
          long row = chk._start + r;
          for(int i = 1; i <= diffs; i++) {
            // at_slow may reach back into the previous chunk.
            double x_lag = chk.at_slow(row - i*lag);
            double coef = ArithmeticUtils.binomialCoefficient(diffs, i);
            x += (i % 2 == 0) ? coef*x_lag : -coef*x_lag;
          }
          nchk.addNum(x);
        }
      }
    }.doAll(1,fr).outputFrame(fr.names(), fr.domains());
    env.subRef(fr, skey);
    env.pop();
    env.push(fr2);
  }
}

// ----
// Class of things that will auto-expand across arrays in a 2-to-1 way:
// applying 2 things (from an array or scalar to array or scalar) producing an
// array or scalar result.
abstract class ASTBinOp extends ASTOp {
  static Type[] newsig() {
    Type t1 = Type.dblary(), t2 = Type.dblary();
    return new Type[]{Type.anyary(new Type[]{t1,t2}),t1,t2};
  }
  ASTBinOp( int form, int precedence, int association ) {
    super(VARS2, newsig(), form, precedence, association); // binary ops are infix ops
  }
  // The scalar-on-scalar operation; frames broadcast this element-wise.
  abstract double op( double d0, double d1 );
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    // Expect we can broadcast across all functions as needed.
    Frame fr0 = null, fr1 = null;
    double d0=0, d1=0;
    if( env.isAry() ) fr1 = env.popAry(); else d1 = env.popDbl();  String k0 = env.key();
    if( env.isAry() ) fr0 = env.popAry(); else d0 = env.popDbl();  String k1 = env.key();
    if( fr0==null && fr1==null ) { env.poppush(op(d0,d1)); return; }  // scalar op scalar
    final boolean lf = fr0 != null;   // left operand is a frame?
    final boolean rf = fr1 != null;   // right operand is a frame?
    final double df0 = d0, df1 = d1;
    Frame fr = null;  // Do-All frame
    int ncols = 0;    // Result column count
    if( fr0 !=null ) {  // Left?
      ncols = fr0.numCols();
      if( fr1 != null ) {
        if( fr0.numCols() != fr1.numCols() || fr0.numRows() != fr1.numRows() )
          throw new IllegalArgumentException("Arrays must be same size: LHS FRAME NUM ROWS/COLS: "+fr0.numRows()+"/"+fr0.numCols() +" vs RHS FRAME NUM ROWS/COLS: "+fr1.numRows()+"/"+fr1.numCols());
        // Frame-on-frame: run over the two frames side-by-side (left cols, then right cols).
        fr = new Frame(fr0).add(fr1,true);
      } else {
        fr = fr0;
      }
    } else {
      ncols = fr1.numCols();
      fr = fr1;
    }
    final ASTBinOp bin = this;  // Final 'this' so can use in closure
    // Run an arbitrary binary op on one or two frames & scalars
    Frame fr2 = new MRTask2() {
      @Override public void map( Chunk chks[], NewChunk nchks[] ) {
        for( int i=0; i<nchks.length; i++ ) {
          NewChunk n =nchks[i];
          int rlen = chks[0]._len;
          Chunk c0 = chks[i];
          // Enum columns only make sense for == and != ; otherwise emit all NAs below.
          if( (!c0._vec.isEnum() && !(lf && rf && chks[i+nchks.length]._vec.isEnum())) || bin instanceof ASTEQ || bin instanceof ASTNE ) {
            for( int r=0; r<rlen; r++ ) {
              double lv; double rv;
              // NA on either side yields NA, except for "|" which has special NA semantics.
              if (lf) {
                if(vecs(i).isUUID() || (chks[i].isNA0(r) && !bin.opStr().equals("|"))) { n.addNum(Double.NaN); continue; }
                lv = chks[i].at0(r);
              } else {
                if (Double.isNaN(df0) && !bin.opStr().equals("|")) { n.addNum(Double.NaN); continue; }
                lv = df0;
              }
              if (rf) {
                // NOTE(review): this NA test reads chks[i] (the LEFT chunk) rather than the
                // right-operand chunk chks[i+offset] — verify whether that is intentional.
                if(vecs(i+(lf ? nchks.length:0)).isUUID() || chks[i].isNA0(r) && !bin.opStr().equals("|")) { n.addNum(Double.NaN); continue; }
                rv = chks[i+(lf ? nchks.length:0)].at0(r);
              } else {
                if (Double.isNaN(df1) && !bin.opStr().equals("|")) { n.addNum(Double.NaN); continue; }
                rv = df1;
              }
              n.addNum(bin.op(lv, rv));
            }
          } else {
            for( int r=0; r<rlen; r++ ) n.addNA();
          }
        }
      }
    }.doAll(ncols,fr).outputFrame((lf ? fr0 : fr1)._names,null);
    if( fr0 != null ) env.subRef(fr0,k0);
    if( fr1 != null ) env.subRef(fr1,k1);
    env.pop();
    env.push(fr2);
  }
}
// Unary prefix ops and the elementary arithmetic binary ops.
class ASTUniPlus  extends ASTUniOp { ASTUniPlus() { super(OPF_INFIX, OPP_UPLUS, OPA_RIGHT); } @Override String opStr(){ return "+" ;} @Override ASTOp make() {return new ASTUniPlus(); } @Override double op(double d) { return d;}}
class ASTUniMinus extends ASTUniOp { ASTUniMinus() { super(OPF_INFIX, OPP_UMINUS, OPA_RIGHT); } @Override String opStr(){ return "-" ;} @Override ASTOp make() {return new ASTUniMinus();} @Override double op(double d) { return -d;}}
class ASTNot      extends ASTUniOp { ASTNot() { super(OPF_INFIX, OPP_NOT, OPA_RIGHT); } @Override String opStr(){ return "!" ;} @Override ASTOp make() {return new ASTNot(); } @Override double op(double d) { return d==0?1:0; }}
class ASTPlus extends ASTBinOp { ASTPlus() { super(OPF_INFIX, OPP_PLUS, OPA_LEFT ); } @Override String opStr(){ return "+" ;} @Override ASTOp make() {return new ASTPlus();} @Override double op(double d0, double d1) { return d0+d1;}}
class ASTSub  extends ASTBinOp { ASTSub()  { super(OPF_INFIX, OPP_MINUS, OPA_LEFT); } @Override String opStr(){ return "-" ;} @Override ASTOp make() {return new ASTSub ();} @Override double op(double d0, double d1) { return d0-d1;}}
class ASTMul  extends ASTBinOp { ASTMul()  { super(OPF_INFIX, OPP_MUL, OPA_LEFT); } @Override String opStr(){ return "*" ;} @Override ASTOp make() {return new ASTMul ();} @Override double op(double d0, double d1) { return d0*d1;}}
class ASTDiv  extends ASTBinOp { ASTDiv()  { super(OPF_INFIX, OPP_DIV, OPA_LEFT); } @Override String opStr(){ return "/" ;} @Override ASTOp make() {return new ASTDiv ();} @Override double op(double d0, double d1) { return d0/d1;}}
class ASTPow  extends ASTBinOp { ASTPow()  { super(OPF_INFIX, OPP_POWER, OPA_RIGHT);} @Override String opStr(){ return "^" ;} @Override ASTOp make() {return new ASTPow ();} @Override double op(double d0, double d1) { return Math.pow(d0,d1);}}
class ASTPow2 extends ASTBinOp { ASTPow2() { super(OPF_INFIX, OPP_POWER, OPA_RIGHT);} @Override String opStr(){ return "**" ;} @Override ASTOp make() {return new ASTPow2();} @Override double op(double d0, double d1) { return Math.pow(d0,d1);}}
class ASTMod  extends ASTBinOp { ASTMod()  { super(OPF_INFIX, OPP_MOD, OPA_LEFT); } @Override String opStr(){ return "%" ;} @Override ASTOp make() {return new ASTMod ();} @Override double op(double d0, double d1) { return d0%d1;}}
class ASTMod2 extends ASTBinOp { ASTMod2() { super(OPF_INFIX, OPP_MOD, OPA_LEFT); } @Override String opStr(){ return "%%" ;} @Override ASTOp make() {return new ASTMod2 ();} @Override double op(double d0, double d1) { return d0%d1;}}
class ASTLT extends ASTBinOp
// Comparisons are ulp-tolerant: values within one small ulp compare as equal, so the
// strict forms (< and >) additionally require inequality under that tolerance.
{ ASTLT() { super(OPF_INFIX, OPP_LT, OPA_LEFT); } @Override String opStr(){ return "<" ;} @Override ASTOp make() {return new ASTLT ();} @Override double op(double d0, double d1) { return d0<d1 && !Utils.equalsWithinOneSmallUlp(d0,d1)?1:0;}}
class ASTLE extends ASTBinOp { ASTLE() { super(OPF_INFIX, OPP_LE, OPA_LEFT); } @Override String opStr(){ return "<=" ;} @Override ASTOp make() {return new ASTLE ();} @Override double op(double d0, double d1) { return d0<d1 || Utils.equalsWithinOneSmallUlp(d0,d1)?1:0;}}
class ASTGT extends ASTBinOp { ASTGT() { super(OPF_INFIX, OPP_GT, OPA_LEFT); } @Override String opStr(){ return ">" ;} @Override ASTOp make() {return new ASTGT ();} @Override double op(double d0, double d1) { return d0>d1 && !Utils.equalsWithinOneSmallUlp(d0,d1)?1:0;}}
class ASTGE extends ASTBinOp { ASTGE() { super(OPF_INFIX, OPP_GE, OPA_LEFT); } @Override String opStr(){ return ">=" ;} @Override ASTOp make() {return new ASTGE ();} @Override double op(double d0, double d1) { return d0>d1 || Utils.equalsWithinOneSmallUlp(d0,d1)?1:0;}}
class ASTEQ extends ASTBinOp { ASTEQ() { super(OPF_INFIX, OPP_EQ, OPA_LEFT); } @Override String opStr(){ return "==" ;} @Override ASTOp make() {return new ASTEQ ();} @Override double op(double d0, double d1) { return Utils.equalsWithinOneSmallUlp(d0,d1)?1:0;}}
class ASTNE extends ASTBinOp { ASTNE() { super(OPF_INFIX, OPP_NE, OPA_LEFT); } @Override String opStr(){ return "!=" ;} @Override ASTOp make() {return new ASTNE ();} @Override double op(double d0, double d1) { return Utils.equalsWithinOneSmallUlp(d0,d1)?0:1;}}
// Logical AND: 0 on either side gives 0; otherwise a NaN operand propagates NaN.
class ASTLA extends ASTBinOp { ASTLA() { super(OPF_INFIX, OPP_AND, OPA_LEFT); } @Override String opStr(){ return "&" ;} @Override ASTOp make() {return new ASTLA ();} @Override double op(double d0, double d1) { return (d0!=0 && d1!=0) ? (Double.isNaN(d0) || Double.isNaN(d1)?Double.NaN:1) :0;}}
// Logical OR: a nonzero side dominates NaN; 0|NaN, NaN|0 and NaN|NaN are all NaN.
class ASTLO extends ASTBinOp { ASTLO() { super(OPF_INFIX, OPP_OR, OPA_LEFT); } @Override String opStr(){ return "|" ;} @Override ASTOp make() {return new ASTLO ();} @Override double op(double d0, double d1) {
  if (d0 == 0 && Double.isNaN(d1)) { return Double.NaN; }
  if (d1 == 0 && Double.isNaN(d0)) { return Double.NaN; }
  if (Double.isNaN(d0) && Double.isNaN(d1)) { return Double.NaN; }
  if (d0 == 0 && d1 == 0) { return 0; }
  return 1;
}}
class ASTIntDiv extends ASTBinOp { ASTIntDiv() { super(OPF_INFIX, OPP_INTDIV, OPA_LEFT); } @Override String opStr(){ return "%/%";} @Override ASTOp make() {return new ASTIntDiv();} @Override double op(double d0, double d1) { return Math.floor(d0/d1); }}

// Variable length; instances will be created of required length
// Fold a varargs list of scalars and frames down to one scalar with `op`, starting
// from `_init`; `_narm` mirrors R's na.rm and skips NaN inputs.
abstract class ASTReducerOp extends ASTOp {
  final double _init;
  boolean _narm;        // na.rm in R
  ASTReducerOp( double init, boolean narm ) {
    super(new String[]{"","dbls"},
          new Type[]{Type.DBL,Type.varargs(Type.dblary())},
          OPF_PREFIX, OPP_PREFIX, OPA_RIGHT);
    _init = init;
    _narm = narm;
  }
  // Reduce a plain double[] into out[0].
  @Override double[] map(Env env, double[] in, double[] out) {
    double s = _init;
    for (double v : in) if (!_narm || !Double.isNaN(v)) s = op(s,v);
    if (out == null || out.length < 1) out = new double[1];
    out[0] = s;
    return out;
  }
  abstract double op( double d0, double d1 );
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    double sum=_init;
    for( int i=0; i<argcnt-1; i++ )
      if( env.isDbl() ) sum = op(sum,env.popDbl());
      else {
        Frame fr = env.popAry();
        String skey = env.key();
        sum = op(sum,_narm?new NaRmRedOp(this).doAll(fr)._d:new RedOp(this).doAll(fr)._d);
        env.subRef(fr,skey);
      }
    env.poppush(sum);
  }
  // Distributed reduction over all cells of a frame (NaNs included).
  private static class RedOp extends MRTask2<RedOp> {
    final ASTReducerOp _bin;
    RedOp( ASTReducerOp bin ) { _bin = bin; _d = bin._init; }
    double _d;
    @Override public void map( Chunk chks[] ) {
      for( int i=0; i<chks.length; i++ ) {
        Chunk C = chks[i];
        for( int r=0; r<C._len; r++ ) _d = _bin.op(_d,C.at0(r));
        if( Double.isNaN(_d) ) break;   // once NaN, the result stays NaN — stop early
      }
    }
    @Override public void reduce( RedOp s ) { _d = _bin.op(_d,s._d); }
  }
  // Distributed reduction skipping NaN cells (na.rm=TRUE).
  private static class NaRmRedOp extends MRTask2<NaRmRedOp> {
    final ASTReducerOp _bin;
    NaRmRedOp( ASTReducerOp bin ) { _bin = bin; _d = bin._init; }
    double _d;
    @Override public void map( Chunk chks[] ) {
      for( int i=0; i<chks.length; i++ ) {
        Chunk C = chks[i];
        for( int r=0; r<C._len; r++ )
          if (!Double.isNaN(C.at0(r))) _d = _bin.op(_d,C.at0(r));
        if( Double.isNaN(_d) ) break;
      }
    }
    @Override public void reduce( NaRmRedOp s ) { _d = _bin.op(_d,s._d); }
  }
}
class ASTSum     extends ASTReducerOp { ASTSum( )     {super(0,false);} @Override String opStr(){ return "sum" ;} @Override ASTOp make() {return new ASTSum(); } @Override double op(double d0, double d1) { return d0+d1;}}
class ASTSumNaRm extends ASTReducerOp { ASTSumNaRm( ) {super(0,true) ;} @Override String opStr(){ return "sum.na.rm";} @Override ASTOp make() {return new ASTSumNaRm();} @Override double op(double d0, double d1) { return d0+d1;}}

// Reduce(op2, ary): parsed but not implemented.
class ASTReduce extends ASTOp {
  static final String VARS[] = new String[]{ "", "op2", "ary"};
  static final Type TYPES[]= new Type []{ Type.ARY, Type.fcn(new Type[]{Type.DBL,Type.DBL,Type.DBL}), Type.ARY };
  ASTReduce( ) { super(VARS,TYPES,OPF_PREFIX,OPP_PREFIX,OPA_RIGHT); }
  @Override String opStr(){ return "Reduce";}
  @Override ASTOp make() {return this;}
  @Override void apply(Env env, int argcnt, ASTApply apply) { throw H2O.unimpl(); }
}

// TODO: Check refcnt mismatch issue: tmp = cbind(h.hex,3.5) results in different refcnts per col
// cbind(...): column-bind frames and scalars; scalars become constant columns.
class ASTCbind extends ASTOp {
  @Override String opStr() { return "cbind"; }
  ASTCbind( ) { super(new String[]{"cbind","ary"},
          new Type[]{Type.ARY,Type.varargs(Type.dblary())},
          OPF_PREFIX, OPP_PREFIX,OPA_RIGHT); }
  @Override ASTOp make() {return new ASTCbind(); }
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    // All frame arguments must agree on the row count (vmax = first frame's first Vec).
    Vec vmax = null;
    for(int i = 0; i < argcnt-1; i++) {
      if(env.isAry(-argcnt+1+i)) {
        Frame tmp = env.ary(-argcnt+1+i);
if(vmax == null) vmax = tmp.vecs()[0]; else if(tmp.numRows() != vmax.length()) // R pads shorter cols to match max rows by cycling/repeating, but we won't support that throw new IllegalArgumentException("Row mismatch! Expected " + String.valueOf(vmax.length()) + " but frame has " + String.valueOf(tmp.numRows())); } } Frame fr = new Frame(new String[0],new Vec[0]); for(int i = 0; i < argcnt-1; i++) { if( env.isAry(-argcnt+1+i) ) { String name; Frame fr2 = env.ary(-argcnt+1+i); Frame fr3 = fr.makeCompatible(fr2); if( fr3 != fr2 ) { // If copied into a new Frame, need to adjust refs env.addRef(fr3); env.subRef(fr2,null); } // Take name from an embedded assign: "cbind(colNameX = some_frame, ...)" if( fr2.numCols()==1 && apply != null && (name = apply._args[i+1].argName()) != null ) { if (name.equals(fr3._key.toString())) fr.add(fr3,true); else fr.add(name, fr3.anyVec()); } else fr.add(fr3,true); } else { double d = env.dbl(-argcnt+1+i); Vec v = vmax == null ? Vec.make1Elem(d) : vmax.makeCon(d); fr.add("C" + String.valueOf(i+1), v); env.addRef(v); } } env._ary[env._sp-argcnt] = fr; env._fcn[env._sp-argcnt] = null; env._sp -= argcnt-1; Arrays.fill(env._ary,env._sp,env._sp+(argcnt-1),null); assert env.check_refcnt(fr.anyVec()); } } class ASTMinNaRm extends ASTReducerOp { ASTMinNaRm( ) { super( Double.POSITIVE_INFINITY, true ); } @Override String opStr(){ return "min.na.rm";} @Override ASTOp make() {return new ASTMinNaRm();} @Override double op(double d0, double d1) { return Math.min(d0, d1); } @Override void apply(Env env, int argcnt, ASTApply apply) { double min = Double.POSITIVE_INFINITY; int nacnt = 0; for( int i=0; i<argcnt-1; i++ ) if( env.isDbl() ) { double a = env.popDbl(); if (Double.isNaN(a)) nacnt++; else min = Math.min(min, a); } else { Frame fr = env.peekAry(); for (Vec v : fr.vecs()) min = Math.min(min, v.min()); env.pop(); } if (nacnt > 0 && min == Double.POSITIVE_INFINITY) min = Double.NaN; env.poppush(min); } } class ASTMaxNaRm extends ASTReducerOp { 
ASTMaxNaRm( ) { super( Double.NEGATIVE_INFINITY, true ); } @Override String opStr(){ return "max.na.rm";} @Override ASTOp make() {return new ASTMaxNaRm();} @Override double op(double d0, double d1) { return Math.max(d0,d1); } @Override void apply(Env env, int argcnt, ASTApply apply) { double max = Double.NEGATIVE_INFINITY; int nacnt = 0; for( int i=0; i<argcnt-1; i++ ) if( env.isDbl() ) { double a = env.popDbl(); if (Double.isNaN(a)) nacnt++; else max = Math.max(max, a); } else { Frame fr = env.peekAry(); for (Vec v : fr.vecs()) max = Math.max(max, v.max()); env.pop(); } if (nacnt > 0 && max == Double.NEGATIVE_INFINITY) max = Double.NaN; env.poppush(max); } } class ASTMin extends ASTReducerOp { ASTMin( ) { super( Double.POSITIVE_INFINITY, false); } @Override String opStr(){ return "min";} @Override ASTOp make() {return new ASTMin();} @Override double op(double d0, double d1) { return Math.min(d0, d1); } @Override void apply(Env env, int argcnt, ASTApply apply) { double min = Double.POSITIVE_INFINITY; for( int i=0; i<argcnt-1; i++ ) if( env.isDbl() ) min = Math.min(min, env.popDbl()); else { Frame fr = env.peekAry(); for (Vec v : fr.vecs()) if (v.naCnt() > 0) { min = Double.NaN; break; } else min = Math.min(min, v.min()); env.pop(); } env.poppush(min); } } class ASTMax extends ASTReducerOp { ASTMax( ) { super( Double.NEGATIVE_INFINITY, false ); } @Override String opStr(){ return "max";} @Override ASTOp make() {return new ASTMax();} @Override double op(double d0, double d1) { return Math.max(d0,d1); } @Override void apply(Env env, int argcnt, ASTApply apply) { double max = Double.NEGATIVE_INFINITY; for( int i=0; i<argcnt-1; i++ ) if( env.isDbl() ) max = Math.max(max, env.popDbl()); else { Frame fr = env.peekAry(); for (Vec v : fr.vecs()) if (v.naCnt() > 0) { max = Double.NaN; break; } else max = Math.max(max, v.max()); env.pop(); } env.poppush(max); } } // R like binary operator && class ASTAND extends ASTOp { @Override String opStr() { return "&&"; } ASTAND( ) { 
super(new String[]{"", "x", "y"}, new Type[]{Type.DBL,Type.dblary(),Type.dblary()}, OPF_PREFIX, OPP_AND, OPA_RIGHT); } @Override ASTOp make() { return new ASTAND(); } @Override void apply(Env env, int argcnt, ASTApply apply) { double op1 = env.isAry(-2) ? env.ary(-2).vecs()[0].at(0) : env.dbl(-2); double op2 = op1==0 ? 0 : Double.isNaN(op1) ? Double.NaN : env.isAry(-1) ? env.ary(-1).vecs()[0].at(0) : env.dbl(-1); env.pop(3); if (!Double.isNaN(op2)) op2 = op2==0?0:1; env.push(op2); } } // R like binary operator || class ASTOR extends ASTOp { @Override String opStr() { return "||"; } ASTOR( ) { super(new String[]{"", "x", "y"}, new Type[]{Type.DBL,Type.dblary(),Type.dblary()}, OPF_PREFIX, OPP_OR, OPA_RIGHT); } @Override ASTOp make() { return new ASTOR(); } @Override void apply(Env env, int argcnt, ASTApply apply) { double op1 = env.isAry(-2) ? env.ary(-2).vecs()[0].at(0) : env.dbl(-2); double op2 = !Double.isNaN(op1) && op1!=0 ? 1 : env.isAry(-1) ? env.ary(-1).vecs()[0].at(0) : env.dbl(-1); if (!Double.isNaN(op2) && op2 != 0) op2 = 1; else if (op2 == 0 && Double.isNaN(op1)) op2 = Double.NaN; env.push(op2); } } // Brute force implementation of matrix multiply class ASTMMult extends ASTOp { @Override String opStr() { return "%*%"; } ASTMMult( ) { super(new String[]{"", "x", "y"}, new Type[]{Type.ARY,Type.ARY,Type.ARY}, OPF_PREFIX, OPP_MUL, OPA_RIGHT); } @Override ASTOp make() { return new ASTMMult(); } @Override void apply(Env env, int argcnt, ASTApply apply) { env.poppush(3,new Matrix(env.ary(-2)).mult(env.ary(-1)),null); } } // Brute force implementation of matrix transpose class ASTMTrans extends ASTOp { @Override String opStr() { return "t"; } ASTMTrans( ) { super(new String[]{"", "x"}, new Type[]{Type.ARY,Type.dblary()}, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); } @Override ASTOp make() { return new ASTMTrans(); } @Override void apply(Env env, int argcnt, ASTApply apply) { if(!env.isAry(-1)) { Key k = new Vec.VectorGroup().addVec(); Futures fs = new Futures(); 
AppendableVec avec = new AppendableVec(k); NewChunk chunk = new NewChunk(avec, 0); chunk.addNum(env.dbl(-1)); chunk.close(0, fs); Vec vec = avec.close(fs); fs.blockForPending(); vec._domain = null; Frame fr = new Frame(new String[] {"C1"}, new Vec[] {vec}); env.poppush(2,new Matrix(fr).trans(),null); } else env.poppush(2,new Matrix(env.ary(-1)).trans(),null); } } // Similar to R's seq_len class ASTSeqLen extends ASTOp { @Override String opStr() { return "seq_len"; } ASTSeqLen( ) { super(new String[]{"seq_len", "n"}, new Type[]{Type.ARY,Type.DBL}, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); } @Override ASTOp make() { return this; } @Override void apply(Env env, int argcnt, ASTApply apply) { long len = (long)env.popDbl(); if (len <= 0) throw new IllegalArgumentException("Error in seq_len(" +len+"): argument must be coercible to positive integer"); env.poppush(1,new Frame(new String[]{"c"}, new Vec[]{Vec.makeSeq(len)}),null); } } class ASTColSeq extends ASTOp { @Override String opStr() { return ":"; } ASTColSeq() { super(new String[]{":", "from", "to"}, new Type[]{Type.dblary(), Type.DBL, Type.DBL}, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); } @Override ASTOp make() { return this; } @Override void apply(Env env, int argcnt, ASTApply apply) { double by = 1.0; double to = env.popDbl(); double from = env.popDbl(); double delta = to - from; if(delta == 0 && to == 0) env.poppush(to); else { double n = delta/by; if(n < 0) throw new IllegalArgumentException("wrong sign in 'by' argument"); else if(n > Double.MAX_VALUE) throw new IllegalArgumentException("'by' argument is much too small"); double dd = Math.abs(delta)/Math.max(Math.abs(from), Math.abs(to)); if(dd < 100*Double.MIN_VALUE) env.poppush(from); else { Key k = new Vec.VectorGroup().addVec(); Futures fs = new Futures(); AppendableVec av = new AppendableVec(k); NewChunk nc = new NewChunk(av, 0); int len = (int)n + 1; for (int r = 0; r < len; r++) nc.addNum(from + r*by); // May need to adjust values = by > 0 ? 
min(values, to) : max(values, to) nc.close(0, fs); Vec vec = av.close(fs); fs.blockForPending(); vec._domain = null; env.poppush(1, new Frame(new String[] {"C1"}, new Vec[] {vec}), null); } } } } // Same logic as R's generic seq method class ASTSeq extends ASTOp { @Override String opStr() { return "seq"; } ASTSeq() { super(new String[]{"seq", "from", "to", "by"}, new Type[]{Type.dblary(), Type.DBL, Type.DBL, Type.DBL}, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); } @Override ASTOp make() { return this; } @Override void apply(Env env, int argcnt, ASTApply apply) { double by = env.popDbl(); double to = env.popDbl(); double from = env.popDbl(); double delta = to - from; if(delta == 0 && to == 0) env.poppush(to); else { double n = delta/by; if(n < 0) throw new IllegalArgumentException("wrong sign in 'by' argument"); else if(n > Double.MAX_VALUE) throw new IllegalArgumentException("'by' argument is much too small"); double dd = Math.abs(delta)/Math.max(Math.abs(from), Math.abs(to)); if(dd < 100*Double.MIN_VALUE) env.poppush(from); else { Key k = new Vec.VectorGroup().addVec(); Futures fs = new Futures(); AppendableVec av = new AppendableVec(k); NewChunk nc = new NewChunk(av, 0); int len = (int)n + 1; for (int r = 0; r < len; r++) nc.addNum(from + r*by); // May need to adjust values = by > 0 ? 
min(values, to) : max(values, to) nc.close(0, fs); Vec vec = av.close(fs); fs.blockForPending(); vec._domain = null; env.poppush(1, new Frame(new String[] {"C1"}, new Vec[] {vec}), null); } } } } class ASTRepLen extends ASTOp { @Override String opStr() { return "rep_len"; } ASTRepLen() { super(new String[]{"rep_len", "x", "length.out"}, new Type[]{Type.dblary(), Type.DBL, Type.DBL}, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); } @Override ASTOp make() { return this; } @Override void apply(Env env, int argcnt, ASTApply apply) { if(env.isAry(-2)) H2O.unimpl(); else { long len = (long)env.popDbl(); if(len <= 0) throw new IllegalArgumentException("Error in rep_len: argument length.out must be coercible to a positive integer"); double x = env.popDbl(); env.poppush(1,new Frame(new String[]{"C1"}, new Vec[]{Vec.makeConSeq(x, len)}),null); } } } // Compute exact quantiles given a set of cutoffs, using multipass binning algo. class ASTQtile extends ASTOp { @Override String opStr() { return "quantile"; } ASTQtile( ) { super(new String[]{"quantile","x","probs"}, new Type[]{Type.ARY, Type.ARY, Type.ARY}, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); } @Override ASTQtile make() { return new ASTQtile(); } @Override void apply(Env env, int argcnt, ASTApply apply) { Frame x = env.ary(-2); Vec xv = x .theVec("Argument #1 in Quantile contains more than 1 column."); Vec pv = env.ary(-1).theVec("Argument #2 in Quantile contains more than 1 column."); double p[] = new double[(int)pv.length()]; for (int i = 0; i < pv.length(); i++) { if ((p[i]=pv.at((long)i)) < 0 || p[i] > 1) throw new IllegalArgumentException("Quantile: probs must be in the range of [0, 1]."); } if ( xv.isEnum() ) { throw new IllegalArgumentException("Quantile: column type cannot be Enum."); } // create output vec Vec res = pv.makeCon(Double.NaN); final int MAX_ITERATIONS = 16; final int MAX_QBINS = 1000; // less uses less memory, can take more passes final boolean MULTIPASS = true; // approx in 1 pass if false // Type 7 matches R 
default final int INTERPOLATION = 7; // linear if quantile not exact on row. 2 uses mean. // a little obtuse because reusing first pass object, if p has multiple thresholds // since it's always the same (always had same valStart/End seed = vec min/max // some MULTIPASS conditionals needed if we were going to make this work for approx or exact final Quantiles[] qbins1 = new Quantiles.BinTask2(MAX_QBINS, xv.min(), xv.max()).doAll(xv)._qbins; for( int i=0; i<p.length; i++ ) { double quantile = p[i]; // need to pass a different threshold now for each finishUp! qbins1[0].finishUp(xv, new double[]{quantile}, INTERPOLATION, MULTIPASS); if( qbins1[0]._done ) { res.set(i,qbins1[0]._pctile[0]); } else { // the 2-N map/reduces are here (with new start/ends. MULTIPASS is implied Quantiles[] qbinsM = new Quantiles.BinTask2(MAX_QBINS, qbins1[0]._newValStart, qbins1[0]._newValEnd).doAll(xv)._qbins; for( int iteration = 2; iteration <= MAX_ITERATIONS; iteration++ ) { qbinsM[0].finishUp(xv, new double[]{quantile}, INTERPOLATION, MULTIPASS); if( qbinsM[0]._done ) { res.set(i,qbinsM[0]._pctile[0]); break; } // the 2-N map/reduces are here (with new start/ends. 
MULTIPASS is implied qbinsM = new Quantiles.BinTask2(MAX_QBINS, qbinsM[0]._newValStart, qbinsM[0]._newValEnd).doAll(xv)._qbins; } } } res.chunkForChunkIdx(0).close(0,null); res.postWrite(); env.poppush(argcnt, new Frame(new String[]{"Quantile"}, new Vec[]{res}), null); } } // Variable length; flatten all the component arys class ASTCat extends ASTOp { @Override String opStr() { return "c"; } ASTCat( ) { super(new String[]{"cat","dbls"}, new Type[]{Type.ARY,Type.varargs(Type.dblary())}, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); } @Override ASTOp make() {return new ASTCat();} @Override double[] map(Env env, double[] in, double[] out) { if (out == null || out.length < in.length) out = new double[in.length]; for (int i = 0; i < in.length; i++) out[i] = in[i]; return out; } @Override void apply(Env env, int argcnt, ASTApply apply) { Key key = Vec.VectorGroup.VG_LEN1.addVecs(1)[0]; AppendableVec av = new AppendableVec(key); NewChunk nc = new NewChunk(av,0); for( int i=0; i<argcnt-1; i++ ) { if (env.isAry(i-argcnt+1)) for (Vec vec : env.ary(i-argcnt+1).vecs()) { if (vec.nChunks() > 1) H2O.unimpl(); for (int r = 0; r < vec.length(); r++) nc.addNum(vec.at(r)); } else nc.addNum(env.dbl(i-argcnt+1)); } nc.close(0,null); Vec v = av.close(null); env.pop(argcnt); env.push(new Frame(new String[]{"C1"}, new Vec[]{v})); } } class ASTRunif extends ASTOp { @Override String opStr() { return "runif"; } ASTRunif() { super(new String[]{"runif","dbls","seed"}, new Type[]{Type.ARY,Type.ARY,Type.DBL}, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); } @Override ASTOp make() {return new ASTRunif();} @Override void apply(Env env, int argcnt, ASTApply apply) { double temp = env.popDbl(); final long seed = (temp == -1) ? 
System.currentTimeMillis() : (long)temp; Frame fr = env.popAry(); String skey = env.key(); long [] espc = fr.anyVec()._espc; long rem = fr.numRows(); if(rem > espc[espc.length-1]) throw H2O.unimpl(); for(int i = 0; i < espc.length; ++i){ if(rem <= espc[i]){ espc = Arrays.copyOf(espc, i+1); break; } } espc[espc.length-1] = rem; Vec randVec = new Vec(fr.anyVec().group().addVecs(1)[0],espc); Futures fs = new Futures(); DKV.put(randVec._key,randVec, fs); for(int i = 0; i < espc.length-1; ++i) DKV.put(randVec.chunkKey(i),new C0DChunk(0,(int)(espc[i+1]-espc[i])),fs); fs.blockForPending(); new MRTask2() { @Override public void map(Chunk c){ Random rng = new Random(seed*c.cidx()); for(int i = 0; i < c._len; ++i) c.set0(i, (float)rng.nextDouble()); } }.doAll(randVec); env.subRef(fr,skey); env.pop(); env.push(new Frame(new String[]{"rnd"},new Vec[]{randVec})); } } class ASTSdev extends ASTOp { ASTSdev() { super(new String[]{"sd", "ary"}, new Type[]{Type.DBL,Type.ARY}, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); } @Override String opStr() { return "sd"; } @Override ASTOp make() { return new ASTSdev(); } @Override void apply(Env env, int argcnt, ASTApply apply) { Frame fr = env.peekAry(); if (fr.vecs().length > 1) throw new IllegalArgumentException("sd does not apply to multiple cols."); if (fr.vecs()[0].isEnum()) throw new IllegalArgumentException("sd only applies to numeric vector."); double sig = fr.vecs()[0].sigma(); env.pop(); env.poppush(sig); } } class ASTVar extends ASTOp { ASTVar() { super(new String[]{"var", "ary"}, new Type[]{Type.dblary(),Type.dblary()}, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); } @Override String opStr() { return "var"; } @Override ASTOp make() { return new ASTVar(); } @Override void apply(Env env, int argcnt, ASTApply apply) { if(env.isDbl()) { env.pop(2); env.push(Double.NaN); } else { Frame fr = env.ary(-1); String[] colnames = fr.names(); // Save standard deviations for later use double[] sdev = new double[fr.numCols()]; for(int i = 0; i < fr.numCols(); 
i++) sdev[i] = fr.vecs()[i].sigma(); // TODO: Might be more efficient to modify DataInfo to allow for separate standardization of mean and std dev DataInfo dinfo = new DataInfo(fr, 0, true, false, DataInfo.TransformType.STANDARDIZE); GramTask tsk = new GramTask(null, dinfo, false, false).doAll(dinfo._adaptedFrame); double[][] var = tsk._gram.getXX(); long nobs = tsk._nobs; assert sdev.length == var.length; assert sdev.length == var[0].length; // Just push the scalar if input is a single col if(var.length == 1 && var[0].length == 1) { env.pop(2); double x = var[0][0]*sdev[0]*sdev[0]; // Undo normalization of each col's standard deviation x = x*nobs/(nobs-1); // Divide by n-1 rather than n so unbiased env.push(x); } else { // Build output vecs for var-cov matrix Key keys[] = Vec.VectorGroup.VG_LEN1.addVecs(var.length); Vec[] vecs = new Vec[var.length]; for(int i = 0; i < var.length; i++) { AppendableVec v = new AppendableVec(keys[i]); NewChunk c = new NewChunk(v,0); v._domain = null; for (int j = 0; j < var[0].length; j++) { double x = var[i][j]*sdev[i]*sdev[j]; // Undo normalization of each col's standard deviation x = x*nobs/(nobs-1); // Divide by n-1 rather than n so unbiased c.addNum(x); } c.close(0, null); vecs[i] = v.close(null); } env.pop(2); env.push(new Frame(colnames, vecs)); } } } } class ASTMean extends ASTOp { ASTMean() { super(new String[]{"mean", "ary"}, new Type[]{Type.DBL,Type.ARY}, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); } @Override String opStr() { return "mean"; } @Override ASTOp make() { return new ASTMean(); } @Override void apply(Env env, int argcnt, ASTApply apply) { Frame fr = env.peekAry(); if (fr.vecs().length > 1) throw new IllegalArgumentException("mean does not apply to multiple cols."); if (fr.vecs()[0].isEnum()) throw new IllegalArgumentException("mean only applies to numeric vector."); double ave = fr.vecs()[0].mean(); env.pop(); env.poppush(ave); } @Override double[] map(Env env, double[] in, double[] out) { if (out == null || out.length 
< 1) out = new double[1]; double s = 0; int cnt=0; for (double v : in) if( !Double.isNaN(v) ) { s+=v; cnt++; } out[0] = s/cnt; return out; } } class ASTMedian extends ASTOp { ASTMedian() { super(new String[]{"median", "ary"}, new Type[]{Type.DBL,Type.ARY}, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); } @Override String opStr() { return "median"; } @Override ASTOp make() { return new ASTMedian(); } @Override void apply(Env env, int argcnt, ASTApply apply) { Frame fr = env.peekAry(); if (fr.vecs().length > 1) throw new IllegalArgumentException("median does not apply to multiple cols."); if (fr.vecs()[0].isEnum()) throw new IllegalArgumentException("median only applies to numeric vector."); QuantilesPage qp = new QuantilesPage(); qp.source_key = fr; qp.column = fr.anyVec(); qp.invoke(); double median = qp.result; env.pop(); env.poppush(median); } } class ASTMostCommon extends ASTOp { ASTMostCommon() { super(new String[]{"mode", "ary"}, new Type[]{Type.DBL,Type.ARY}, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); } @Override String opStr() { return "mode"; } @Override ASTOp make() { return new ASTMostCommon(); } @Override void apply(Env env, int argcnt, ASTApply apply) { Frame fr = env.peekAry(); if (fr.vecs().length > 1) throw new IllegalArgumentException("mode does not apply to multiple cols."); if (!fr.vecs()[0].isEnum()) throw new IllegalArgumentException("mode only applies to factor columns."); Vec column = fr.anyVec(); String dom[] = column.domain(); long[][] levels = new long[1][]; levels[0] = new Vec.CollectDomain(column).doAll(new Frame(column)).domain(); long[][] counts = new ASTTable.Tabularize(levels).doAll(column)._counts; long maxCounts = -1; int mode = -1; for (int i = 0; i < counts[0].length; ++i) { if (counts[0][i] > maxCounts && !dom[i].equals("NA")) { maxCounts = counts[0][i]; mode = i; } } double mc = mode != -1 ? 
(double)mode : (double)Arrays.asList(dom).indexOf("NA"); if (mc == -1) mc = Double.NaN; env.pop(); env.poppush(mc); } } class ASTXorSum extends ASTReducerOp { ASTXorSum() {super(0,false); } @Override String opStr(){ return "xorsum";} @Override ASTOp make() {return new ASTXorSum();} @Override double op(double d0, double d1) { long d0Bits = Double.doubleToLongBits(d0); long d1Bits = Double.doubleToLongBits(d1); long xorsumBits = d0Bits ^ d1Bits; // just need to not get inf or nan. If we zero the upper 4 bits, we won't final long ZERO_SOME_SIGN_EXP = 0x0fffffffffffffffL; xorsumBits = xorsumBits & ZERO_SOME_SIGN_EXP; double xorsum = Double.longBitsToDouble(xorsumBits); return xorsum; } @Override double[] map(Env env, double[] in, double[] out) { if (out == null || out.length < 1) out = new double[1]; long xorsumBits = 0; long vBits; // for dp ieee 754 , sign and exp are the high 12 bits // We don't want infinity or nan, because h2o will return a string. double xorsum = 0; for (double v : in) { vBits = Double.doubleToLongBits(v); xorsumBits = xorsumBits ^ vBits; } // just need to not get inf or nan. If we zero the upper 4 bits, we won't final long ZERO_SOME_SIGN_EXP = 0x0fffffffffffffffL; xorsumBits = xorsumBits & ZERO_SOME_SIGN_EXP; xorsum = Double.longBitsToDouble(xorsumBits); out[0] = xorsum; return out; } } // Selective return. If the selector is a double, just eval both args and // return the selected one. If the selector is an array, then it must be // compatible with argument arrays (if any), and the selection is done // element-by-element. 
class ASTIfElse extends ASTOp { static final String VARS[] = new String[]{"ifelse","tst","true","false"}; static Type[] newsig() { Type t1 = Type.unbound(), t2 = Type.unbound(), t3=Type.unbound(); return new Type[]{Type.anyary(new Type[]{t1,t2,t3}),t1,t2,t3}; } ASTIfElse( ) { super(VARS, newsig(),OPF_INFIX,OPP_PREFIX,OPA_RIGHT); } @Override ASTOp make() {return new ASTIfElse();} @Override String opStr() { return "ifelse"; } // Parse an infix trinary ?: operator static AST parse(Exec2 E, AST tst, boolean EOS) { if( !E.peek('?',true) ) return null; int x=E._x; AST tru=E.xpeek(':',E._x,parseCXExpr(E,false)); if( tru == null ) E.throwErr("Missing expression in trinary",x); x = E._x; AST fal=parseCXExpr(E,EOS); if( fal == null ) E.throwErr("Missing expression in trinary",x); return ASTApply.make(new AST[]{new ASTIfElse(),tst,tru,fal},E,x); } @Override void apply(Env env, int argcnt, ASTApply apply) { // All or none are functions assert ( env.isFcn(-1) && env.isFcn(-2) && _t.ret().isFcn()) || (!env.isFcn(-1) && !env.isFcn(-2) && !_t.ret().isFcn()); // If the result is an array, then one of the other of the two must be an // array. , and this is a broadcast op. assert !_t.isAry() || env.isAry(-1) || env.isAry(-2); // Single selection? Then just pick slots if( !env.isAry(-3) ) { if( env.dbl(-3)==0 ) env.pop_into_stk(-4); else { env.pop(); env.pop_into_stk(-3); } return; } Frame frtst=null, frtru= null, frfal= null; double dtst= 0 , dtru= 0 , dfal= 0 ; if( env.isAry() ) frfal= env.popAry(); else dfal = env.popDbl(); String kf = env.key(); if( env.isAry() ) frtru= env.popAry(); else dtru = env.popDbl(); String kt = env.key(); if( env.isAry() ) frtst= env.popAry(); else dtst = env.popDbl(); String kq = env.key(); // Multi-selection // Build a doAll frame Frame fr = new Frame(frtst); // Do-All frame final int ncols = frtst.numCols(); // Result column count final long nrows = frtst.numRows(); // Result row count String names[]=null; if( frtru !=null ) { // True is a Frame? 
if( frtru.numCols() != ncols || frtru.numRows() != nrows ) throw new IllegalArgumentException("Arrays must be same size: "+frtst+" vs "+frtru); fr.add(frtru,true); names = frtru._names; } if( frfal !=null ) { // False is a Frame? if( frfal.numCols() != ncols || frfal.numRows() != nrows ) throw new IllegalArgumentException("Arrays must be same size: "+frtst+" vs "+frfal); fr.add(frfal,true); names = frfal._names; } if( names==null && frtst!=null ) names = frtst._names; final boolean t = frtru != null; final boolean f = frfal != null; final double fdtru = dtru; final double fdfal = dfal; // Run a selection picking true/false across the frame Frame fr2 = new MRTask2() { @Override public void map( Chunk chks[], NewChunk nchks[] ) { for( int i=0; i<nchks.length; i++ ) { NewChunk n =nchks[i]; int off=i; Chunk ctst= chks[off]; Chunk ctru= t ? chks[off+=ncols] : null; Chunk cfal= f ? chks[off+=ncols] : null; int rlen = ctst._len; for( int r=0; r<rlen; r++ ) if( ctst.isNA0(r) ) n.addNA(); else n.addNum(ctst.at0(r)!=0 ? (t ? ctru.at0(r) : fdtru) : (f ? 
cfal.at0(r) : fdfal)); } } }.doAll(ncols,fr).outputFrame(names,fr.domains()); env.subRef(frtst,kq); if( frtru != null ) env.subRef(frtru,kt); if( frfal != null ) env.subRef(frfal,kf); env.pop(); env.push(fr2); } } class ASTCut extends ASTOp { ASTCut() { super(new String[]{"cut", "ary", "dbls"}, new Type[]{Type.ARY, Type.ARY, Type.dblary()}, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); } @Override String opStr() { return "cut"; } @Override ASTOp make() {return new ASTCut();} @Override void apply(Env env, int argcnt, ASTApply apply) { if(env.isDbl()) { final int nbins = (int) Math.floor(env.popDbl()); if(nbins < 2) throw new IllegalArgumentException("Number of intervals must be at least 2"); Frame fr = env.popAry(); String skey = env.key(); if(fr.vecs().length != 1 || fr.vecs()[0].isEnum()) throw new IllegalArgumentException("First argument must be a numeric column vector"); final double fmax = fr.vecs()[0].max(); final double fmin = fr.vecs()[0].min(); final double width = (fmax - fmin)/nbins; if(width == 0) throw new IllegalArgumentException("Data vector is constant!"); // Note: I think R perturbs constant vecs slightly so it can still bin values // Construct domain names from bins intervals String[][] domains = new String[1][nbins]; domains[0][0] = "(" + String.valueOf(fmin - 0.001*(fmax-fmin)) + "," + String.valueOf(fmin + width) + "]"; for(int i = 1; i < nbins; i++) domains[0][i] = "(" + String.valueOf(fmin + i*width) + "," + String.valueOf(fmin + (i+1)*width) + "]"; Frame fr2 = new MRTask2() { @Override public void map(Chunk chk, NewChunk nchk) { for(int r = 0; r < chk._len; r++) { double x = chk.at0(r); double n = x == fmax ? 
nbins-1 : Math.floor((x - fmin)/width); nchk.addNum(n); } } }.doAll(1,fr).outputFrame(fr._names, domains); env.subRef(fr, skey); env.pop(); env.push(fr2); } else if(env.isAry()) { Frame ary = env.popAry(); String skey1 = env.key(); if(ary.vecs().length != 1 || ary.vecs()[0].isEnum()) throw new IllegalArgumentException("Second argument must be a numeric column vector"); Vec brks = ary.vecs()[0]; // TODO: Check that num rows below some cutoff, else this will likely crash // Remove duplicates and sort vector of breaks in ascending order SortedSet<Double> temp = new TreeSet<Double>(); for(int i = 0; i < brks.length(); i++) temp.add(brks.at(i)); int cnt = 0; final double[] cutoffs = new double[temp.size()]; for(Double x : temp) { cutoffs[cnt] = x; cnt++; } if(cutoffs.length < 2) throw new IllegalArgumentException("Vector of breaks must have at least 2 unique values"); Frame fr = env.popAry(); String skey2 = env.key(); if(fr.vecs().length != 1 || fr.vecs()[0].isEnum()) throw new IllegalArgumentException("First argument must be a numeric column vector"); // Construct domain names from bin intervals final int nbins = cutoffs.length-1; String[][] domains = new String[1][nbins]; for(int i = 0; i < nbins; i++) domains[0][i] = "(" + cutoffs[i] + "," + cutoffs[i+1] + "]"; Frame fr2 = new MRTask2() { @Override public void map(Chunk chk, NewChunk nchk) { for(int r = 0; r < chk._len; r++) { double x = chk.at0(r); if(Double.isNaN(x) || x <= cutoffs[0] || x > cutoffs[cutoffs.length-1]) nchk.addNum(Double.NaN); else { for(int i = 1; i < cutoffs.length; i++) { if(x <= cutoffs[i]) { nchk.addNum(i-1); break; } } } } } }.doAll(1,fr).outputFrame(fr._names, domains); env.subRef(ary, skey1); env.subRef(fr, skey2); env.pop(); env.push(fr2); } else throw H2O.unimpl(); } } class ASTfindInterval extends ASTOp { ASTfindInterval() { super(new String[]{"findInterval", "ary", "vec", "rightmost.closed"}, new Type[]{Type.ARY, Type.ARY, Type.dblary(), Type.DBL}, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); } 
  @Override String opStr() { return "findInterval"; }
  @Override ASTOp make() { return new ASTfindInterval(); }
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    // rightmost.closed flag: any non-zero double counts as TRUE.
    final boolean rclosed = env.popDbl() == 0 ? false : true;
    if(env.isDbl()) {           // Single cutoff: produce a 0/1 indicator column
      final double cutoff = env.popDbl();
      Frame fr = env.popAry();
      String skey = env.key();
      if(fr.vecs().length != 1 || fr.vecs()[0].isEnum())
        throw new IllegalArgumentException("First argument must be a numeric column vector");
      Frame fr2 = new MRTask2() {
        @Override public void map(Chunk chk, NewChunk nchk) {
          for(int r = 0; r < chk._len; r++) {
            double x = chk.at0(r);
            if(Double.isNaN(x))
              nchk.addNum(Double.NaN);
            else {
              if(rclosed)
                nchk.addNum(x > cutoff ? 1 : 0); // For rightmost.closed = TRUE
              else
                nchk.addNum(x >= cutoff ? 1 : 0);
            }
          }
        }
      }.doAll(1,fr).outputFrame(fr._names, fr.domains());
      env.subRef(fr, skey);
      env.pop();
      env.push(fr2);
    } else if(env.isAry()) {    // Vector of break points
      Frame ary = env.popAry();
      String skey1 = env.key();
      if(ary.vecs().length != 1 || ary.vecs()[0].isEnum())
        throw new IllegalArgumentException("Second argument must be a numeric column vector");
      Vec brks = ary.vecs()[0];
      // TODO: Check that num rows below some cutoff, else this will likely crash
      // Check if vector of cutoffs is sorted in weakly ascending order
      final int len = (int)brks.length();
      final double[] cutoffs = new double[len];
      for(int i = 0; i < len-1; i++) {
        if(brks.at(i) > brks.at(i+1))
          throw new IllegalArgumentException("Second argument must be sorted in non-decreasing order");
        cutoffs[i] = brks.at(i);
      }
      cutoffs[len-1] = brks.at(len-1);
      Frame fr = env.popAry();
      String skey2 = env.key();
      if(fr.vecs().length != 1 || fr.vecs()[0].isEnum())
        throw new IllegalArgumentException("First argument must be a numeric column vector");
      Frame fr2 = new MRTask2() {
        @Override public void map(Chunk chk, NewChunk nchk) {
          for(int r = 0; r < chk._len; r++) {
            double x = chk.at0(r);
            if(Double.isNaN(x))
              nchk.addNum(Double.NaN);
            else {
              // binarySearch: exact hit -> index n >= 0;
              // miss -> -(insertion point)-1, so insertion point == -n-1.
              double n = Arrays.binarySearch(cutoffs, x);
              if(n < 0)
// Continuation of ASTfindInterval.apply (started on the previous lines):
// translate the binarySearch result into an interval index.
nchk.addNum(-n-1);                             // miss: #cutoffs <= x (the insertion point)
else if(rclosed && n == len-1) nchk.addNum(n); // For rightmost.closed = TRUE
else nchk.addNum(n+1);                         // exact hit: 1-based interval index
} } } }.doAll(1,fr).outputFrame(fr._names, fr.domains());
env.subRef(ary, skey1);
env.subRef(fr, skey2);
env.pop();
env.push(fr2);
} } }

// R-like factor(): convert a single numeric column into an enum (categorical)
// column; an already-enum column is passed through unchanged.
class ASTFactor extends ASTOp {
  ASTFactor() { super(new String[]{"factor", "ary"},
                      new Type[]{Type.ARY, Type.ARY}, OPF_PREFIX, OPP_PREFIX,OPA_RIGHT); }
  @Override String opStr() { return "factor"; }
  @Override ASTOp make() {return new ASTFactor();}
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    Frame ary = env.peekAry(); // Ary on top of stack, keeps +1 refcnt
    String skey = env.peekKey();
    if( ary.numCols() != 1 )
      throw new IllegalArgumentException("factor requires a single column");
    Vec v0 = ary.vecs()[0];
    // Only convert when not already an enum.
    Vec v1 = v0.isEnum() ? null : v0.toEnum();
    if (v1 != null) {
      ary = new Frame(ary._names,new Vec[]{v1});
      skey = null;
    }
    env.poppush(2, ary, skey);
  }
}

// R-like as.numeric(): strip enum-ness from every column so enum columns
// present their raw level numbers; numeric columns pass through unchanged.
class ASTNumeric extends ASTOp {
  ASTNumeric() { super(new String[]{"as.numeric", "ary"},
                       new Type[]{Type.ARY, Type.ARY}, OPF_PREFIX, OPP_PREFIX,OPA_RIGHT); }
  @Override String opStr() { return "as.numeric"; }
  @Override ASTOp make() {return new ASTNumeric();}
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    Frame ary = env.peekAry(); // Ary on top of stack, keeps +1 refcnt
    String skey = env.peekKey();
    Vec[] nvecs = new Vec[ary.numCols()];
    for (int c = 0; c < ary.numCols(); ++c) {
      Vec v = ary.vecs()[c];
      // Identity transform over the enum's level indices, then drop the
      // domain so the column presents as plain numbers.
      Vec nv = v.isEnum() ? v.makeTransf(seq(0, v.domain().length), seq(0, v.domain().length), null) : null;
      if (nv != null) nv._domain = null;
      nvecs[c] = nv == null ? v : nv;
    }
    // BUG FIX: the converted vectors were computed but then discarded -- the
    // frame was rebuilt from ary.vecs() instead of nvecs, making as.numeric a
    // no-op for enum columns. Build the result from the transformed vectors.
    ary = new Frame(ary._names, nvecs);
    env.poppush(2, ary, skey);
  }
}

// print(x, ...): append a textual rendering of each argument to the session
// string buffer; evaluates to the first argument.
class ASTPrint extends ASTOp {
  static Type[] newsig() {
    Type t1 = Type.unbound();
    return new Type[]{t1, t1, Type.varargs(Type.unbound())};
  }
  ASTPrint() { super(new String[]{"print", "x", "y..."},
                     newsig(), OPF_PREFIX, OPP_PREFIX,OPA_RIGHT); }
  @Override String opStr() { return "print"; }
  @Override ASTOp make() {return new ASTPrint();}
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    for( int i=1; i<argcnt; i++ ) {
      if( env.isAry(i-argcnt) ) {     // Frames get the full table rendering
        env._sb.append(env.ary(i-argcnt).toStringAll());
      } else {                        // Scalars/functions get the generic form
        env._sb.append(env.toString(env._sp+i-argcnt,true));
      }
    }
    env.pop(argcnt-2);     // Pop most args
    env.pop_into_stk(-2);  // Pop off fcn, returning 1st arg
  }
}

/**
 * R 'ls' command.
 *
 * This method is purely for the console right now.  Print stuff into the string buffer.
 * JSON response is not configured at all.
 */
class ASTLs extends ASTOp {
  ASTLs() { super(new String[]{"ls"},
                  new Type[]{Type.DBL}, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); }
  @Override String opStr() { return "ls"; }
  @Override ASTOp make() {return new ASTLs();}
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    // List every user-visible key currently present in the cloud.
    for( Key key : H2O.KeySnapshot.globalSnapshot().keys())
      if( key.user_allowed() && H2O.get(key) != null )
        env._sb.append(key.toString());
    // Pop the self-function and push a zero.
    env.pop();
    env.push(0.0);
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/exec/ASTRApply.java
package water.exec;
import water.MRTask2;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.NewChunk;
import water.fvec.Vec;
import water.util.FrameUtils;

/** Parse a generic R string and build an AST, in the context of an H2O Cloud
 *  @author cliffc@0xdata.com
 */

// --------------------------------------------------------------------------
// R's Apply. Function is limited to taking a single column and returning
// a single column. Double is limited to 1 or 2, statically determined.
class ASTRApply extends ASTOp {
  static final String VARS[] = new String[]{ "", "ary", "dbl1.2", "fcn"};
  ASTRApply( ) { super(VARS,
                       new Type[]{ Type.ARY, Type.dblary(), Type.dblary(), Type.fcn(new Type[]{Type.dblary(),Type.ARY}) },
                       OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); }
  protected ASTRApply( String vars[], Type ts[], int form, int prec, int asso) { super(vars,ts,form,prec,asso); }
  @Override String opStr(){ return "apply";}
  @Override ASTOp make() {return new ASTRApply();}
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    // Peek everything from the stack
    final ASTOp op = env.fcn(-1); // ary->dblary but better be ary[,1]->dblary[,1]
    double d = env.dbl(-2);       // MARGIN: ROW=1, COLUMN=2 selector
    Frame fr = env.ary(-3);       // The Frame to work on
    if( d==2 || d== -1 ) {        // Work on columns?
      int ncols = fr.numCols();
      double ds[][] = null; // If results are doubles, gather in small array
      Frame fr2 = null;     // If the results are Vecs, gather them in this Frame
      String err = "apply requires that "+op+" return 1 column";
      if( op._t.ret().isDbl() ) ds = new double[ncols][1];
      else fr2 = new Frame(new String[0],new Vec[0]);
      // Apply the function across columns
      try {
        Vec vecs[] = fr.vecs();
        for( int i=0; i<ncols; i++ ) {
          // Push fcn + a 1-column frame, then invoke the fcn on it.
          env.push(op);
          env.push(new Frame(new String[]{fr._names[i]},new Vec[]{vecs[i]}));
          env.fcn(-2).apply(env, 2, null);
          if( ds != null ) {    // Doubles or Frame results?
            ds[i][0] = env.popDbl();
          } else {              // Frame results
            fr2.add(fr._names[i], env.popXAry().theVec(err));
          }
        }
      } catch( IllegalArgumentException iae ) {
        // Drop any partially-built result before rethrowing.
        env.subRef(fr2,null);
        throw iae;
      }
      env.pop(4);
      if( ds != null ) env.push(FrameUtils.frame(new String[]{"C1"},ds));
      else env.push(fr2);
      assert env.isAry();
      return;
    }
    if( d==1 || d==-2) { // Work on rows
      // apply on rows is essentially a map function
      Type ts[] = new Type[2];
      ts[0] = Type.unbound();
      ts[1] = Type.ARY;
      Type ft1 = Type.fcn(ts);
      Type ft2 = op._t.find(); // Should be a function type
      if( !ft1.union(ft2) ) {
        if( ft2._ts.length != 2 )
          throw new IllegalArgumentException("FCN " + op.toString() + " cannot accept one argument.");
        if( !ft2._ts[1].union(ts[1]) )
          throw new IllegalArgumentException("Arg " + op._vars[1] + " typed " + ft2._ts[1].find() + " but passed as " + ts[1]);
        assert false;
      }
      // find out return type (probe the fcn on the first row)
      double[] rowin = new double[fr.vecs().length];
      for (int c = 0; c < rowin.length; c++) rowin[c] = fr.vecs()[c].at(0);
      final int outlen = op.map(env,rowin,null).length;
      final Env env0 = env;
      MRTask2 mrt = new MRTask2() {
        @Override public void map(Chunk[] cs, NewChunk[] ncs) {
          double rowin [] = new double[cs.length];
          double rowout[] = new double[outlen];
          for (int row = 0; row < cs[0]._len; row++) {
            for (int c = 0; c < cs.length; c++) rowin[c] = cs[c].at0(row);
            op.map(env0, rowin, rowout);
            for (int c = 0; c < ncs.length; c++) ncs[c].addNum(rowout[c]);
          }
        }
      };
      String[] names = new String[outlen];
      for (int i = 0; i < names.length; i++) names[i] = "C"+(i+1);
      Frame res = mrt.doAll(outlen,fr).outputFrame(names, null);
      env.poppush(4,res,null);
      return;
    }
    throw new IllegalArgumentException("MARGIN limited to 1 (rows) or 2 (cols)");
  }
}

// --------------------------------------------------------------------------
// Same as "apply" but defaults to columns.
class ASTSApply extends ASTRApply { static final String VARS[] = new String[]{ "", "ary", "fcn"}; ASTSApply( ) { super(VARS, new Type[]{ Type.ARY, Type.ARY, Type.fcn(new Type[]{Type.dblary(),Type.ARY}) }, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); } @Override String opStr(){ return "sapply";} @Override ASTOp make() {return new ASTSApply();} @Override void apply(Env env, int argcnt, ASTApply apply) { // Stack: SApply, ary, fcn // -->: RApply, ary, 2, fcn assert env.isFcn(-3); env._fcn[env._sp-3] = new ASTRApply(); ASTOp fcn = env.popFcn(); // Pop, no ref-cnt env.push(2.0); env.push(1); env._fcn[env._sp-1] = fcn; // Push, no ref-cnt super.apply(env,argcnt+1,null); } } // -------------------------------------------------------------------------- // unique(ary) // Returns only the unique rows class ASTUnique extends ASTddply { static final String VARS[] = new String[]{ "", "ary"}; ASTUnique( ) { super(VARS, new Type[]{Type.ARY, Type.ARY}); } @Override String opStr(){ return "unique";} @Override ASTOp make() {return new ASTUnique();} @Override void apply(Env env, int argcnt, ASTApply apply) { Thread cThr = Thread.currentThread(); Frame fr = env.peekAry(); int cols[] = new int[fr.numCols()]; for( int i=0; i<cols.length; i++ ) cols[i]=i; ddplyPass1 p1 = new ddplyPass1( false, cols ).doAll(fr); double dss[][] = new double[p1._groups.size()][]; int i=0; for( Group g : p1._groups.keySet() ) dss[i++] = g._ds; Frame res = FrameUtils.frame(fr._names,dss); env.poppush(2,res,null); } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/exec/ASTTable.java
package water.exec;
import water.Key;
import water.MRTask2;
import water.fvec.*;
import water.util.Utils;
import java.util.Arrays;

// R-like table(): build a contingency table over one or two integer/enum
// columns -- 1-D level counts, or a 2-D cross-tabulation.
public class ASTTable extends ASTOp {
  ASTTable() { super(new String[]{"table", "ary"},
                     new Type[]{Type.ARY,Type.ARY}, OPF_PREFIX, OPP_PREFIX, OPA_RIGHT); }
  @Override String opStr() { return "table"; }
  @Override ASTOp make() { return new ASTTable(); }
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    int ncol;
    Frame fr = env.ary(-1);
    if ((ncol = fr.vecs().length) > 2)
      throw new IllegalArgumentException("table does not apply to more than two cols.");
    for (int i = 0; i < ncol; i++)
      if (!fr.vecs()[i].isInt())
        throw new IllegalArgumentException("table only applies to integer vectors.");
    String[][] domains = new String[ncol][]; // the domain names to display as row and col names
    // if vec does not have original domain, use levels returned by CollectDomain
    long[][] levels = new long[ncol][];
    for (int i = 0; i < ncol; i++) {
      Vec v = fr.vecs()[i];
      levels[i] = new Vec.CollectDomain(v).doAll(new Frame(v)).domain();
      domains[i] = v.domain();
    }
    // Count occurrences of each level pair across the whole frame.
    long[][] counts = new Tabularize(levels).doAll(fr)._counts;
    // Build output vecs
    Key keys[] = Vec.VectorGroup.VG_LEN1.addVecs(counts.length+1);
    Vec[] vecs = new Vec[counts.length+1];
    String[] colnames = new String[counts.length+1];
    // Column 0 holds the row labels: the levels of the first input column.
    AppendableVec v0 = new AppendableVec(keys[0]);
    v0._domain = fr.vecs()[0].domain() == null ? null : fr.vecs()[0].domain().clone();
    NewChunk c0 = new NewChunk(v0,0);
    for( int i=0; i<levels[0].length; i++ ) c0.addNum((double) levels[0][i]);
    c0.close(0,null);
    vecs[0] = v0.close(null);
    colnames[0] = "row.names";
    if (ncol==1) colnames[1] = "Count";
    // One count column per level of the second input column (or a single
    // "Count" column in the 1-D case).
    for (int level1=0; level1 < counts.length; level1++) {
      AppendableVec v = new AppendableVec(keys[level1+1]);
      NewChunk c = new NewChunk(v,0);
      v._domain = null;
      for (int level0=0; level0 < counts[level1].length; level0++)
        c.addNum((double) counts[level1][level0]);
      c.close(0, null);
      vecs[level1+1] = v.close(null);
      if (ncol>1) {
        colnames[level1+1] = domains[1]==null? Long.toString(levels[1][level1]) : domains[1][(int)(levels[1][level1])];
      }
    }
    env.pop(2);
    env.push(new Frame(colnames, vecs));
  }

  // MRTask counting occurrences of each (level0, level1) pair;
  // _counts is indexed as [level1][level0]. Rows with an NA in either
  // tabulated column are skipped.
  public static class Tabularize extends MRTask2<Tabularize> {
    public final long[][] _domains;
    public long[][] _counts;
    public Tabularize(long[][] dom) { super(); _domains=dom; }
    @Override public void map(Chunk[] cs) {
      assert cs.length == _domains.length;
      _counts = _domains.length==1? new long[1][] : new long[_domains[1].length][];
      for (int i=0; i < _counts.length; i++) _counts[i] = new long[_domains[0].length];
      for (int i=0; i < cs[0]._len; i++) {
        if (cs[0].isNA0(i)) continue;
        long ds[] = _domains[0];
        // Levels arrays are sorted (CollectDomain), so binary search locates the slot.
        int level0 = Arrays.binarySearch(ds, cs[0].at80(i));
        assert 0 <= level0 && level0 < ds.length : "l0="+level0+", len0="+ds.length+", min="+ds[0]+", max="+ds[ds.length-1];
        int level1;
        if (cs.length>1) {
          if (cs[1].isNA0(i)) continue;
          else level1 = Arrays.binarySearch(_domains[1],(int)cs[1].at80(i));
          assert 0 <= level1 && level1 < _domains[1].length;
        } else {
          level1 = 0;
        }
        _counts[level1][level0]++;
      }
    }
    @Override public void reduce(Tabularize that) { Utils.add(_counts, that._counts); }
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/exec/ASTddply.java
package water.exec;
import water.*;
import water.fvec.*;
import water.nbhm.NonBlockingHashMap;
import water.util.Log;
import java.util.ArrayList;
import java.util.Arrays;

// --------------------------------------------------------------------------
// PLYR's DDPLY. GroupBy by any other name. Type signature:
// #RxN ddply(RxC,subC, 1xN function( subRxC ) { ... } )
// R - Rows in original frame
// C - Cols in original frame
// subC - Subset of C; either a single column entry, or a 1 Vec frame with a list of columns.
// subR - Subset of R, where all subC values are the same.
// N - Return column(s). Can be 1, and so fcn can return a dbl instead of 1xN
// #R - # of unique combos in the original "subC" set
public class ASTddply extends ASTOp {
  static final String VARS[] = new String[]{ "#RxN", "RxC", "subC", "fcn_subRxC"};
  ASTddply( ) { this(VARS, new Type[]{ Type.ARY, Type.ARY, Type.dblary(), Type.fcn(new Type[]{Type.dblary(),Type.ARY}) }); }
  ASTddply(String vars[], Type types[] ) { super(vars,types,OPF_PREFIX,OPP_PREFIX,OPA_RIGHT); }
  @Override String opStr(){ return "ddply";}
  @Override ASTOp make() {return new ASTddply();}
  @Override void apply(Env env, int argcnt, ASTApply apply) {
    Frame fr = env.ary(-3);    // The Frame to work on
    // Either a single column, or a collection of columns to group on.
    int cols[];
    if( !env.isAry(-2) ) {     // Single column?
      if( Double.isNaN(env.dbl(-2)) ) throw new IllegalArgumentException("NA not a valid column");
      cols = new int[]{(int)env.dbl(-2)-1}; // 1-based R column to 0-based index
    } else {                   // Else a collection of columns?
      Frame cs = env.ary(-2);
      if( cs.numCols() != 1 ) throw new IllegalArgumentException("Only one column-of-columns for column selection");
      if( cs.numRows() > 1000 ) throw new IllegalArgumentException("Too many columns selected");
      cols = new int[(int)cs.numRows()];
      Vec vec = cs.vecs()[0];
      for( int i=0; i<cols.length; i++ )
        if( vec.isNA(i) ) throw new IllegalArgumentException("NA not a valid column");
        else cols[i] = (int)vec.at8(i)-1;
    }
    // Another check for sane columns
    for( int c : cols )
      if( c < 0 || c >= fr.numCols() )
        throw new IllegalArgumentException("Column "+(c+1)+" out of range for frame columns "+fr.numCols());

    // Was pondering a SIMD-like execution model, running the fcn "once" - but
    // in parallel for all groups. But this isn't going to work: each fcn
    // execution will take different control paths. Also the functions side-
    // effects' must only happen once, and they will make multiple passes over
    // the Frame passed in.
    //
    // GroupIDs' can vary from 1 group to 1-per-row. Are formed by the cross-
    // product of the selection cols. Will be hashed to find Group - NBHML
    // mapping row-contents to group. Index is a sample row. NBHML per-node,
    // plus roll-ups. Result/Value is Group structure pointing to NewChunks
    // holding row indices.

    // Pass 1: Find Groups.
    // Build a NBHSet of unique double[]'s holding selection cols.
    // These are the unique groups, found per-node, rolled-up globally
    // Record the rows belonging to each group, locally.
    ddplyPass1 p1 = new ddplyPass1(true,cols).doAll(fr);

    // Pass 2: Build Groups.
    // Wrap Vec headers around all the local row-counts.
    int numgrps = p1._groups.size();
    int csz = H2O.CLOUD.size();
    ddplyPass2 p2 = new ddplyPass2(p1,numgrps,csz).invokeOnAllNodes();
    // vecs[] iteration order exactly matches p1._groups.keySet()
    Vec vecs[] = p2.close();

    // Push the execution env around the cluster
    Key envkey = Key.make();
    UKV.put(envkey, env);

    // Pass 3: Send Groups 'round the cluster
    // Single-threaded per-group work.
    // Send each group to some remote node for execution
    int grpnum=0; // vecs[] iteration order exactly matches p1._groups.keySet()
    int nlocals[] = new int[csz]; // Count of local group#
    ArrayList<AppendableVec> grpCols = new ArrayList<AppendableVec>();
    ArrayList<NewChunk> nchks = new ArrayList<NewChunk>();
    // One result column per grouping column, filled with the group values.
    for (int col : cols) {
      AppendableVec av = new AppendableVec(Vec.VectorGroup.VG_LEN1.addVec());
      grpCols.add(av);
      nchks.add(new NewChunk(av, 0));
    }
    RemoteExec re = null; // Sample RemoteExec
    Futures fs = new Futures();
    int ncols;
    for( Group g : p1._groups.keySet() ) {
      // vecs[] iteration order exactly matches p1._groups.keySet()
      Vec rows = vecs[grpnum++]; // Rows for this Vec
      Vec[] data = fr.vecs();    // Full data columns
      Vec[] gvecs = new Vec[data.length];
      Key[] keys = rows.group().addVecs(data.length);
      // Wrap a row-subset view over every data column for this group.
      for( int c=0; c<data.length; c++ ) {
        gvecs[c] = new SubsetVec(rows._key, data[c]._key, keys[c], rows._espc);
        gvecs[c]._domain = data[c]._domain;
      }
      Key grpkey = Key.make("ddply_grpkey_"+(grpnum-1));
      Frame fg = new Frame(grpkey, fr._names,gvecs);
      Futures gfs = new Futures();
      DKV.put(grpkey, fg, gfs);
      gfs.blockForPending();
      fg.anyVec().rollupStats();
      // Non-blocking, send a group to a remote node for execution
      final int nidx = g.hashCode()%csz;
      fs.add(RPC.call(H2O.CLOUD._memary[nidx],(re=new RemoteExec((grpnum-1),p2._nlocals[nidx],g._ds,fg,envkey))));
    }
    fs.blockForPending(); // Wait for all functions to finish

    //Fold results together; currently stored in Iced Result objects
    grpnum = 0;
    for (Group g: p1._groups.keySet()) {
      int c = 0;
      for (double d : g._ds) nchks.get(c++).addNum(d);
      Key rez_key = Key.make("ddply_RemoteRez_"+grpnum++);
      Result rg = UKV.get(rez_key);
      if (rg == null)
        Log.info("Result was null: grp_id = " + (grpnum - 1) + " rez_key = " + rez_key);
      ncols = rg.isRow() ? rg.resultR().length : 1;
      // Lazily add result columns the first time we learn the fcn's width.
      if (nchks.size() < ncols + cols.length) {
        for(int i = 0; i < ncols;++i) {
          AppendableVec av = new AppendableVec(Vec.VectorGroup.VG_LEN1.addVec());
          grpCols.add(av);
          nchks.add(new NewChunk(av, 0));
        }
      }
      for (int i = 0; i < ncols; ++i)
        nchks.get(c++).addNum(rg.isRow() ? rg.resultR()[i] : rg.resultD());
      UKV.remove(rez_key);
    }
    Vec vres[] = new Vec[grpCols.size()];
    for (int i = 0; i < vres.length; ++i) {
      nchks.get(i).close(0, fs);
      vres[i] = grpCols.get(i).close(fs);
    }
    fs.blockForPending();
    // Result Frame
    String[] names = new String[grpCols.size()];
    for( int i = 0; i < cols.length; i++) {
      names[i] = fr._names[cols[i]];
      vres[i]._domain = fr.vecs()[cols[i]]._domain;
    }
    for( int i = cols.length; i < names.length; i++) names[i] = "C"+(i-cols.length+1);
    Frame ff = new Frame(names,vres);
    // Cleanup pass: Drop NAs (groups with no data and NA groups, basically does na.omit: drop rows with NA)
    boolean anyNA = false;
    Frame res = ff;
    for (Vec v : ff.vecs()) if (v.naCnt() != 0) { anyNA = true; break; } // stop on first vec with naCnt != 0
    if (anyNA) {
      res = new MRTask2() {
        @Override public void map(Chunk[] cs, NewChunk[] nc) {
          int rows = cs[0]._len;
          int cols = cs.length;
          boolean[] NACols = new boolean[cols];
          ArrayList<Integer> xrows = new ArrayList<Integer>();
          for (int i = 0; i < cols; ++i) NACols[i] = (cs[i]._vec.naCnt() != 0);
          // Collect the row indices that contain at least one NA.
          for (int r = 0; r < rows; ++r)
            for (int c = 0; c < cols; ++c)
              if (NACols[c])
                if (cs[c].isNA0(r)) { xrows.add(r); break; }
          // Copy all remaining rows into the output chunks.
          for (int r = 0; r < rows; ++r) {
            if (xrows.contains(r)) continue;
            for (int c = 0; c < cols; ++c) {
              if (cs[c]._vec.isEnum()) nc[c].addEnum((int) cs[c].at80(r));
              else nc[c].addNum(cs[c].at0(r));
            }
          }
        }
      }.doAll(ff.numCols(), ff).outputFrame(null, ff.names(), ff.domains());
      ff.delete();
    }
    // Delete the group row vecs
    UKV.remove(envkey);
    env.poppush(4,res,null);
  }

  // ---
  // Group description: unpacked selected double columns
  public static class Group extends Iced {
    public double _ds[];
    public int _hash;
    public Group(int
len) { _ds = new double[len]; }
    Group( double ds[] ) { _ds = ds; _hash=hash(); }
    // Efficiently allow groups to be hashed & hash-probed
    public void fill(int row, Chunk chks[], int cols[]) {
      for( int c=0; c<cols.length; c++ ) // For all selection cols
        _ds[c] = chks[cols[c]].at0(row); // Load into working array
      _hash = hash();
    }
    private int hash() {
      long h=0;                 // hash is sum of field bits
      for( double d : _ds ) h += Double.doubleToRawLongBits(d);
      // Doubles are lousy hashes; mix up the bits some
      h ^= (h>>>20) ^ (h>>>12);
      h ^= (h>>> 7) ^ (h>>> 4);
      return (int)((h^(h>>32))&0x7FFFFFFF);
    }
    public boolean has(double ds[]) { return Arrays.equals(_ds, ds); }
    @Override public boolean equals( Object o ) { return o instanceof Group && Arrays.equals(_ds,((Group)o)._ds); }
    @Override public int hashCode() { return _hash; }
    @Override public String toString() { return Arrays.toString(_ds); }
  }

  // Result of one remote group execution: either a single double or one row.
  private static class Result extends Iced {
    double _d;   // Result was a single double
    double[] _r; // Result was a row
    Result(double d, double[] r) {_d = d; _r = r; }
    boolean isRow() { return _r != null; }
    double[] resultR () { return _r; }
    double resultD () { return _d; }
  }

  // ---
  // Pass1: Find unique groups, based on a subset of columns.
  // Collect rows-per-group, locally.
  protected static class ddplyPass1 extends MRTask2<ddplyPass1> {
    // INS:
    private boolean _gatherRows; // TRUE if gathering rows-per-group, FALSE if just getting the groups
    private int _cols[];         // Selection columns
    private Key _uniq;           // Unique Key for this entire ddply pass
    ddplyPass1( boolean rows, int cols[] ) { _gatherRows=rows; _cols = cols; _uniq=Key.make(); }
    // OUTS: mapping from groups to row#s that are in that group
    protected NonBlockingHashMap<Group,NewChunk> _groups;
    // *Local* results from ddplyPass1 are kept locally in this tmp structure.
    // Pass2 reads them out & reclaims the space.
    private static NonBlockingHashMap<Key,ddplyPass1> PASS1TMP = new NonBlockingHashMap<Key,ddplyPass1>();
    // Make a NewChunk to hold rows, that has a random Key and is not
    // associated with any Vec. We'll fold these into a Vec later when we know
    // cluster-wide what the Groups (and hence Vecs) are.
    private static final NewChunk XNC = new NewChunk(null,H2O.SELF.index());
    private NewChunk makeNC( ) { return _gatherRows==false ? XNC : new NewChunk(null,H2O.SELF.index()); }
    // Build a Map mapping Groups to a NewChunk of row #'s
    @Override public void map( Chunk chks[] ) {
      _groups = new NonBlockingHashMap<Group,NewChunk>();
      Group g = new Group(_cols.length);
      NewChunk nc = makeNC();
      Chunk C = chks[_cols[0]];
      int len = C._len;
      long start = C._start;
      for( int row=0; row<len; row++ ) {
        // Temp array holding the column-selection data
        g.fill(row,chks,_cols);
        NewChunk nc_old = _groups.putIfAbsent(g,nc);
        if( nc_old==null ) {           // Add group signature if not already present
          nc_old = nc;                 // Jammed 'nc' into the table to hold rows
          g = new Group(_cols.length); // Need a new <Group,NewChunk> pair
          nc = makeNC();
        }
        if( _gatherRows )               // Gathering rows?
          nc_old.addNum(start+row,0);   // Append rows into the existing group
      }
    }
    // Fold together two Group/NewChunk Maps. For the same Group, append
    // NewChunks (hence gathering rows together). Since the custom serializers
    // do not send the rows over the wire, we have only *local* row-counts.
    @Override public void reduce( ddplyPass1 p1 ) {
      assert _groups != p1._groups;
      // Fold 2 hash tables together.
      // Get the larger hash table in m0, smaller in m1
      NonBlockingHashMap<Group,NewChunk> m0 = _groups;
      NonBlockingHashMap<Group,NewChunk> m1 = p1._groups;
      if( m0.size() < m1.size() ) { NonBlockingHashMap<Group,NewChunk> tmp=m0; m0=m1; m1=tmp; }
      // Iterate over smaller table, folding into larger table.
      for( Group g : m1.keySet() ) {
        NewChunk nc0 = m0.get(g);
        NewChunk nc1 = m1.get(g);
        if( nc0 == null || nc0._len == 0) m0.put(g,nc1);
        // unimplemented: expected to blow out on large row counts, where we
        // actually need a collection of chunks, not 1 uber-chunk
        else if( _gatherRows ) {
          // All longs are monotonically in-order. Not sure if this is needed
          // but it's an easy invariant to keep and it makes reading row#s easier.
          if( nc0._len > 0 && nc1._len > 0 && // len==0 for reduces from remotes (since no rows sent)
              nc0.at8_impl(nc0._len-1) >= nc1.at8_impl(0) ) nc0.addr(nc1);
          else if (nc1._len != 0) nc0.add (nc1);
        }
      }
      _groups = m0;
      p1._groups = null;
    }
    @Override public String toString() { return _groups==null ? null : _groups.toString(); }
    // Save local results for pass2
    @Override public void closeLocal() { if( _gatherRows ) PASS1TMP.put(_uniq,this); }
    // Custom serialization for NBHM. Much nicer when these are auto-gen'd.
    // Only sends Groups over the wire, NOT NewChunks with rows.
    @Override public AutoBuffer write( AutoBuffer ab ) {
      super.write(ab);
      ab.putZ(_gatherRows);
      ab.putA4(_cols);
      ab.put(_uniq);
      if( _groups == null ) return ab.put4(0);
      ab.put4(_groups.size());
      for( Group g : _groups.keySet() ) ab.put(g);
      return ab;
    }
    @Override public ddplyPass1 read( AutoBuffer ab ) {
      super.read(ab);
      assert _groups == null;
      _gatherRows = ab.getZ();
      _cols = ab.getA4();
      _uniq = ab.get();
      int len = ab.get4();
      if( len == 0 ) return this;
      // Rebuild the map with placeholder NewChunks (rows are not shipped).
      _groups = new NonBlockingHashMap<Group,NewChunk>();
      for( int i=0; i<len; i++ )
        _groups.put(ab.get(Group.class),new NewChunk(null,-99));
      return this;
    }
    @Override public void copyOver( Freezable dt ) {
      ddplyPass1 that = (ddplyPass1)dt;
      super.copyOver(that);
      this._gatherRows = that._gatherRows;
      this._cols = that._cols;
      this._uniq = that._uniq;
      this._groups = that._groups;
    }
  }

  // ---
  // Pass 2: Build Groups.
  // Wrap Frame/Vec headers around all the local row-counts.
  private static class ddplyPass2 extends DRemoteTask<ddplyPass2> {
    // Key uniquely identifying a pass1 collection of NewChunks
    Key _p1key;
    // One new Vec per Group, holding just rows
    AppendableVec _avs[];
    // The Group descripters
    double _dss[][];
    // Count of groups-per-node (computed once on home node)
    transient int _nlocals[];
    ddplyPass2( ddplyPass1 p1, int numgrps, int csz ) {
      _p1key = p1._uniq; // Key to finding the pass1 data
      // One new Vec per Group, holding just rows
      _avs = new AppendableVec[numgrps];
      _dss = new double [numgrps][];
      _nlocals = new int [csz];
      int i=0;
      for( Group g : p1._groups.keySet() ) {
        _dss[i] = g._ds;
        _avs[i++] = new AppendableVec(Vec.VectorGroup.VG_LEN1.addVec());
        // Same node-selection rule (hash mod cloud-size) as the RPC dispatch.
        _nlocals[g.hashCode()%csz]++;
      }
    }
    // Local (per-Node) work. Gather the chunks together into the Vecs
    @Override public void lcompute() {
      ddplyPass1 p1 = ddplyPass1.PASS1TMP.remove(_p1key);
      Futures fs = new Futures();
      int cidx = H2O.SELF.index();
      for( int i=0; i<_dss.length; i++ ) { // For all possible groups
        // Get the newchunk of local rows for a group
        Group g = new Group(_dss[i]);
        NewChunk nc = p1._groups == null ? null : p1._groups.get(g);
        if( nc != null && nc._len > 0 ) {
          // Fill in fields we punted on during construction
          nc._vec = _avs[i]; // Assign a proper vector
          nc.close(cidx,fs); // Close & compress chunk
        } else {
          // All nodes have a chunk, even if its empty
          DKV.put(_avs[i].chunkKey(cidx), new C0LChunk(0,0),fs);
        }
      }
      fs.blockForPending();
      _p1key = null; // No need to return these
      _dss = null;
      tryComplete();
    }
    @Override public void reduce( ddplyPass2 p2 ) {
      for( int i=0; i<_avs.length; i++ )
        _avs[i].reduce(p2._avs[i]);
    }
    // Close all the AppendableVecs & return normal Vecs.
    Vec[] close() {
      Futures fs = new Futures();
      Vec vs[] = new Vec[_avs.length];
      for( int i=0; i<_avs.length; i++ ) vs[i] = _avs[i].close(fs);
      fs.blockForPending();
      return vs;
    }
  }

  // ---
  // Called once-per-group, it executes the given function on the group.
  private static class RemoteExec extends DTask<RemoteExec> implements Freezable {
    // INS
    final int _grpnum, _numgrps; // This group # out of total groups
    double _ds[];                // Displayable name for this group
    Frame _fr;                   // Frame for this group
    Key _envkey;                 // Key for the execution environment
    // OUTS
    int _ncols; // Number of result columns
    RemoteExec( int grpnum, int numgrps, double ds[], Frame fr, Key envkey ) {
      _grpnum = grpnum; _numgrps = numgrps; _ds=ds; _fr=fr; _envkey=envkey;
      // Always 1 higher priority than calling thread... because the caller will
      // block & burn a thread waiting for this MRTask2 to complete.
      Thread cThr = Thread.currentThread();
      _priority = (byte)((cThr instanceof H2O.FJWThr) ? ((H2O.FJWThr)cThr)._priority+1 : super.priority());
    }
    final private byte _priority;
    @Override public byte priority() { return _priority; }
    // Execute the function on the group
    @Override public void compute2() {
      Env shared_env = UKV.get(_envkey);
      // Clone a private copy of the environment for local execution
      Env env = shared_env.capture(true);
      ASTOp op = env.fcn(-1);
      Key fr_key = Key.make("ddply_grpkey_"+_grpnum);
      Frame aa = DKV.get(fr_key).get();
      Frame fv = new Frame(null, aa.names(), aa.vecs().clone());
      // fv.anyVec().rollupStats();
      env.push(op);
      env.push(fv);
      op.apply(env,2/*1-arg function*/,null);
      // Inspect the results; figure the result column count
      assert shared_env._sp+1 == env._sp; // Exactly one thing pushed
      Frame fr = null;
      if( env.isAry() && (fr=env.ary(-1)).numRows() != 1 )
        throw new IllegalArgumentException("Result of ddply can only return 1 row but instead returned "+fr.numRows());
      _ncols = fr == null ? 1 : fr.numCols();
      // Package the fcn's result as either one double or one row of doubles.
      double[] r = null;
      double d = Double.NaN;
      if (fr == null) d = env.dbl(-1);
      else {
        r = new double[_ncols];
        for (int i = 0; i < _ncols; ++i) r[i] = fr.vecs()[i].at(0);
      }
      Key resultKey = Key.make("ddply_RemoteRez_"+_grpnum);
      Result rez = new Result(d, r);
      Futures fs = new Futures();
      UKV.put(resultKey, rez, fs);
      fs.blockForPending();
      // No need to return any results here.
      _fr.delete();
      aa.delete();
      _fr = null;
      _ds = null;
      _envkey= null;
      tryComplete();
    }
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/exec/Env.java
package water.exec; import water.Futures; import water.Iced; import water.Key; import water.UKV; import water.fvec.Frame; import water.fvec.Vec; import water.util.Log; import water.util.Utils.IcedHashMap; import water.util.Utils.IcedInt; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; /** Execute a R-like AST, in the context of an H2O Cloud * @author cliffc@0xdata.com */ public class Env extends Iced { // An environment is a classic stack of values, passed into AST's as the // execution state. The 3 types we support are Frames (2-d tables of data), // doubles (which are an optimized form of a 1x1 Frame), and ASTs (which are // 1st class functions). String _key[] = new String[4]; // For top-level globals only, record a frame Key Frame _ary[] = new Frame [4]; // Frame (or null if not a frame) double _d [] = new double[4]; // Double (only if frame & func are null) ASTOp _fcn[] = new ASTOp [4]; // Functions (or null if not a function) String _str[] = new String[4]; int _sp; // Stack pointer // Also a Pascal-style display, one display entry per lexical scope. Slot // zero is the start of the global scope (which contains all global vars like // hex Keys) and always starts at offset 0. 
int _display[] = new int[4]; int _tod; String[] _warnings = new String[0]; // capture warnings // Ref Counts for each vector final IcedHashMap<Vec,IcedInt> _refcnt; transient final public StringBuilder _sb; // Holder for print results transient boolean _allow_tmp; // Deep-copy allowed to tmp transient boolean _busy_tmp; // Assert temp is available for use transient Frame _tmp; // The One Big Active Tmp transient final ArrayList<Key> _locked; // The original set of locked frames Env(ArrayList<Key> locked) { _key = new String[4]; // Key for Frame _ary = new Frame [4]; // Frame (or null if not a frame) _d = new double[4]; // Double (only if frame & func are null) _str = new String[4]; _fcn = new ASTOp [4]; // Functions (or null if not a function) _display= new int[4]; _refcnt = new IcedHashMap<Vec,IcedInt>(); _sb = new StringBuilder(); _locked = locked; } public String[] warnings() {return _warnings; } public int sp() { return _sp; } public boolean isAry() { return _ary[_sp-1] != null; } public boolean isFcn () { return _fcn[_sp-1] != null; } public boolean isDbl () { return !isAry() && !isFcn(); } public boolean isStr () { return !isAry() && !isFcn() && _str[_sp-1] != null; } public boolean isFcn (int i) { return _fcn[_sp+i] != null; } public boolean isAry(int i) { return _ary[_sp+i] != null; } // Peek operators public Frame ary(int i) { Frame fr = _ary[_sp+i]; assert fr != null; return fr; } public ASTOp fcn(int i) { ASTOp op = _fcn[_sp+i]; assert op != null; return op; } public double dbl(int i) { double d = _d [_sp+i]; return d; } public String str(int i) { String s = _str[_sp+i]; assert s != null; return s; } // Load the nth Id/variable from the named lexical scope, typed as a Frame public Frame frId(int d, int n) { int idx = _display[_tod-d]+n; assert _ary[idx]!=null; return _ary[idx]; } // Push k empty slots void push( int slots ) { assert 0 <= slots && slots < 1000; int len = _d.length; _sp += slots; while( _sp > len ) { _key= Arrays.copyOf(_key,len<<1); _ary= 
Arrays.copyOf(_ary,len<<1); _d = Arrays.copyOf(_d ,len<<1); _fcn= Arrays.copyOf(_fcn,len<<=1); _str= Arrays.copyOf(_str,len<<1); } } void push( Frame fr ) { push(1); _ary[_sp-1] = addRef(fr); assert _ary[0]==null||check_refcnt(_ary[0].anyVec());} void push( double d ) { push(1); _d [_sp-1] = d ; } void push( String st) { push(1); _str[_sp-1] = st ; } void push( ASTOp fcn) { push(1); _fcn[_sp-1] = addRef(fcn); } void push( Frame fr, String key ) { push(fr); _key[_sp-1]=key; } // Copy from display offset d, nth slot void push_slot( int d, int n ) { assert d==0; // Should use a fcn's closure for d>1 int idx = _display[_tod-d]+n; push(1); _ary[_sp-1] = addRef(_ary[idx]); _d [_sp-1] = _d [idx]; _fcn[_sp-1] = addRef(_fcn[idx]); _str[_sp-1] = _str[idx]; assert _ary[0]==null || check_refcnt(_ary[0].anyVec()); } void push_slot( int d, int n, Env global ) { assert _refcnt==null; // Should use a fcn's closure for d>1 int idx = _display[_tod-d]+n; int gidx = global._sp; global.push(1); global._ary[gidx] = global.addRef(_ary[idx]); global._d [gidx] = _d [idx] ; global._fcn[gidx] = global.addRef(_fcn[idx]); global._str[gidx] = _str[idx] ; assert _ary[0]==null || global.check_refcnt(_ary[0].anyVec()); } // Copy from TOS into a slot. Does NOT pop results. void tos_into_slot( int d, int n, String id ) { // In a copy-on-modify language, only update the local scope, or return val assert d==0 || (d==1 && _display[_tod]==n+1); int idx = _display[_tod-d]+n; // Temporary solution to kill a UDF from global name space. Needs to fix in the future. if (_tod == 0) ASTOp.removeUDF(id); subRef(_ary[idx], _key[idx]); subRef(_fcn[idx]); Frame fr = _ary[_sp-1]; _ary[idx] = fr==null ? null : addRef(new Frame(fr)); _d [idx] = _d [_sp-1] ; _str[idx] = _str[_sp-1] ; _fcn[idx] = addRef(_fcn[_sp-1]); _key[idx] = d==0 && fr!=null ? id : null; // Temporary solution to add a UDF to global name space. Needs to fix in the future. 
if (_tod == 0 && _fcn[_sp-1] != null) ASTOp.putUDF(_fcn[_sp-1], id); assert _ary[0]== null || check_refcnt(_ary[0].anyVec()); } // Copy from TOS into a slot, using absolute index. void tos_into_slot( int idx, String id ) { subRef(_ary[idx], _key[idx]); subRef(_fcn[idx]); Frame fr = _ary[_sp-1]; _ary[idx] = fr==null ? null : addRef(new Frame(fr)); _d [idx] = _d [_sp-1] ; _fcn[idx] = addRef(_fcn[_sp-1]); _str[idx] = _str[_sp-1] ; _key[idx] = fr!=null ? id : null; assert _ary[0]== null || check_refcnt(_ary[0].anyVec()); } // Copy from TOS into stack. Pop's all intermediate. // Example: pop_into_stk(-4) BEFORE: A,B,C,D,TOS AFTER: A,TOS void pop_into_stk( int x ) { assert x < 0; subRef(_ary[_sp+x], _key[_sp+x]); // Nuke out old stuff subRef(_fcn[_sp+x]); _ary[_sp+x] = _ary[_sp-1]; // Copy without changing ref cnt _fcn[_sp+x] = _fcn[_sp-1]; _d [_sp+x] = _d [_sp-1]; _str[_sp+x] = _str[_sp-1]; _sp--; x++; // Pop without changing ref cnt while( x++ < -1 ) pop(); } // Push a scope, leaving room for passed args int pushScope(int args) { assert fcn(-args-1) instanceof ASTFunc; // Expect a function under the args return _display[++_tod] = _sp-args; } // Grab the function for nested scope d ASTFunc fcnScope( int d ) { return (ASTFunc)_fcn[_display[_tod]-1]; } // Pop a slot. Lowers refcnts on vectors. Always leaves stack null behind // (to avoid dangling pointers stretching lifetimes). void pop( Env global ) { assert _sp > _display[_tod]; // Do not over-pop current scope _sp--; _fcn[_sp]=global.subRef(_fcn[_sp]); _ary[_sp]=global.subRef(_ary[_sp],_key[_sp]); assert _sp==0 || _ary[0]==null || check_refcnt(_ary[0].anyVec()); } public void popUncheck( ) { _sp--; _fcn[_sp]=subRef(_fcn[_sp]); _ary[_sp]=subRef(_ary[_sp],_key[_sp]); } public void pop( ) { pop(this); } public void pop( int n ) { for( int i=0; i<n; i++ ) pop(); } void popScope() { assert _tod > 0; // Something to pop? assert _sp >= _display[_tod]; // Did not over-pop already? 
while( _sp > _display[_tod] ) pop(); _tod--; } // Pop & return a Frame or Fcn; ref-cnt of all things remains unchanged. // Caller is responsible for tracking lifetime. public double popDbl() { assert isDbl(); return _d [--_sp]; } public String popStr() { assert isStr(); return _str[--_sp]; } public ASTOp popFcn() { assert isFcn(); ASTOp op = _fcn[--_sp]; _fcn[_sp]=null; return op; } public Frame popAry() { assert isAry(); Frame fr = _ary[--_sp]; _ary[_sp]=null; assert allAlive(fr); return fr; } public Frame peekAry() { assert isAry(); Frame fr = _ary[_sp-1]; assert allAlive(fr); return fr; } public ASTOp peekFcn() { assert isFcn(); ASTOp op = _fcn[_sp-1]; return op; } public String peekKey() { return _key[_sp-1]; } public String key() { return _key[_sp]; } // Pop frame from stack; lower refcnts... allowing to fall to zero without deletion. // Assumption is that this Frame will get pushed again shortly. public Frame popXAry() { Frame fr = popAry(); for( Vec vec : fr.vecs() ) { popVec(vec); if ( vec.masterVec() != null ) popVec(vec.masterVec()); } return fr; } public void popVec(Vec vec) { int cnt = _refcnt.get(vec)._val-1; if( cnt > 0 ) _refcnt.put(vec,new IcedInt(cnt)); else _refcnt.remove(vec); } // Replace a function invocation with it's result public void poppush( int n, Frame ary, String key) { addRef(ary); for( int i=0; i<n; i++ ) { assert _sp > 0; _sp--; _fcn[_sp] = subRef(_fcn[_sp]); _ary[_sp] = subRef(_ary[_sp], _key[_sp]); } push(1); _ary[_sp-1] = ary; _key[_sp-1] = key; assert check_all_refcnts(); } // Replace a function invocation with it's result public void poppush(double d) { pop(); push(d); } // Capture the current environment & return it (for some closure's future execution). 
Env capture( boolean cntrefs ) { return new Env(this,cntrefs); } private Env( Env e, boolean cntrefs ) { _sp = e._sp; _key= Arrays.copyOf(e._key,_sp); _ary= Arrays.copyOf(e._ary,_sp); _d = Arrays.copyOf(e._d ,_sp); _fcn= Arrays.copyOf(e._fcn,_sp); _str = Arrays.copyOf(e._str,_sp); _tod= e._tod; _display = e._display.clone(); if( cntrefs ) { // If counting refs _refcnt = new IcedHashMap<Vec,IcedInt>(); _refcnt.putAll(e._refcnt); // Deep copy the existing refs } else _refcnt = null; // All other fields are ignored/zero _sb = null; _locked = null; } // Nice assert boolean allAlive(Frame fr) { for( Vec vec : fr.vecs() ) assert _refcnt.get(vec)._val > 0; return true; } /** * Subtract reference count. * @param vec vector to handle * @param fs future, cannot be null * @return returns given Future */ public Futures subRef( Vec vec, Futures fs ) { assert fs != null : "Future should not be null!"; if ( vec.masterVec() != null ) subRef(vec.masterVec(), fs); int cnt = _refcnt.get(vec)._val-1; if ( cnt > 0 ) { _refcnt.put(vec,new IcedInt(cnt)); } else { UKV.remove(vec._key,fs); _refcnt.remove(vec); } return fs; } public void subRef(Vec vec) { subRef(vec, new Futures()).blockForPending(); } // Lower the refcnt on all vecs in this frame. // Immediately free all vecs with zero count. // Always return a null. 
public Frame subRef( Frame fr, String key ) { if( fr == null ) return null; Futures fs = new Futures(); for( Vec vec : fr.vecs() ) subRef(vec,fs); fs.blockForPending(); return null; } // Lower refcounts on all vecs captured in the inner environment public ASTOp subRef( ASTOp op ) { if( op == null ) return null; if( !(op instanceof ASTFunc) ) return null; ASTFunc fcn = (ASTFunc)op; if( fcn._env != null ) fcn._env.subRef(this); else Log.info("Popping fcn object, never executed no environ capture"); return null; } Vec addRef( Vec vec ) { IcedInt I = _refcnt.get(vec); assert I==null || I._val>0; assert vec.length() == 0 || vec.isUUID() || (vec.at(0) > 0 || vec.at(0) <= 0 || Double.isNaN(vec.at(0))); _refcnt.put(vec,new IcedInt(I==null?1:I._val+1)); if (vec.masterVec()!=null) addRef(vec.masterVec()); return vec; } // Add a refcnt to all vecs in this frame Frame addRef( Frame fr ) { if( fr == null ) return null; for( Vec vec : fr.vecs() ) addRef(vec); return fr; } ASTOp addRef( ASTOp op ) { if( op == null ) return null; if( !(op instanceof ASTFunc) ) return op; ASTFunc fcn = (ASTFunc)op; if( fcn._env != null ) fcn._env.addRef(this); else Log.info("Pushing fcn object, never executed no environ capture"); return op; } private void addRef(Env global) { for( int i=0; i<_sp; i++ ) { if( _ary[i] != null ) global.addRef(_ary[i]); if( _fcn[i] != null ) global.addRef(_fcn[i]); } } private void subRef(Env global) { for( int i=0; i<_sp; i++ ) { if( _ary[i] != null ) global.subRef(_ary[i],_key[i]); if( _fcn[i] != null ) global.subRef(_fcn[i]); } } // Remove everything public void remove_and_unlock() { // Remove all shallow scopes while( _tod > 0 ) popScope(); // Push changes at the outer scope into the K/V store while( _sp > 0 ) { if( isAry() && _key[_sp-1] != null ) { // Has a K/V mapping? 
Frame fr = popAry(); // Pop w/o lowering refcnt String skey = key(); Frame fr2=new Frame(Key.make(skey),fr._names.clone(),fr.vecs().clone()); for( int i=0; i<fr.numCols(); i++ ) { Vec v = fr.vecs()[i]; int refcnt = _refcnt.get(v)._val; assert refcnt > 0; if( refcnt > 1 ) { // Need a deep-copy now Vec v2 = new Frame(v).deepSlice(null,null).vecs()[0]; fr2.replace(i,v2); // Replace with private deep-copy subRef(v); // Now lower refcnt for good assertions addRef(v2); } // But not down to zero (do not delete items in global scope) } if( _locked.contains(fr2._key) ) fr2.write_lock(null); // Upgrade to write-lock else { fr2.delete_and_lock(null); _locked.add(fr2._key); } // Clear prior & set new data fr2.unlock(null); _locked.remove(fr2._key); // Unlocked already } else { popUncheck(); } } // Unlock all things that do not survive, plus also delete them for( Key k : _locked ) { Frame fr = UKV.get(k); fr.unlock(null); fr.delete(); // Should be atomic really } } // Done writing into all things. Allow rollups. public void postWrite() { for( Vec vec : _refcnt.keySet() ) vec.postWrite(); } // Count references the "hard way" - used to check refcnting math. int compute_refcnt( Vec vec ) { int cnt=0; HashSet<Vec> refs = new HashSet<Vec>(); for( int i=0; i<_sp; i++ ) if( _ary[i] != null) { for (Vec v : _ary[i].vecs()) { Vec vm; if (v.equals(vec)) cnt++; else if ((vm = v.masterVec()) !=null && vm.equals(vec)) cnt++; } } else if( _fcn[i] != null && (_fcn[i] instanceof ASTFunc) ) cnt += ((ASTFunc)_fcn[i])._env.compute_refcnt(vec); return cnt + refs.size(); } boolean check_refcnt( Vec vec ) { IcedInt I = _refcnt.get(vec); int cnt0 = I==null ? 
0 : I._val; int cnt1 = compute_refcnt(vec); if( cnt0==cnt1 ) return true; Log.err("Refcnt is "+cnt0+" but computed as "+cnt1); return false; } boolean check_all_refcnts() { for (Vec v : _refcnt.keySet()) if (check_refcnt(v) == false) return false; return true; } // Pop and return the result as a string public String resultString( ) { assert _tod==0 : "Still have lexical scopes past the global"; String s = toString(_sp-1,true); pop(); return s; } public String toString(int i, boolean verbose_fcn) { if( _ary[i] != null ) return _ary[i]._key+":"+_ary[i].numRows()+"x"+_ary[i].numCols(); else if( _fcn[i] != null ) return _fcn[i].toString(verbose_fcn); else if( _str[i] != null ) return _str[i]; return Double.toString(_d[i]); } @Override public String toString() { String s="{"; for( int i=0; i<_sp; i++ ) s += toString(i,false)+","; return s+"}"; } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/exec/Exec2.java
package water.exec; import water.*; import water.fvec.Frame; import water.util.Log; import java.util.ArrayList; import java.util.Stack; /** Parse and execute a generic R-like string, in the context of an H2O Cloud * @author cliffc@0xdata.com */ public class Exec2 { // Parse a string, execute it & return a Frame. // Basic types: ary (Frame), dbl (scalar double), fcn (function) // Functions are 1st class; every argument typed one of the above. // Assignment is always to in-scope variables only. // Initial environment has all Frame Keys mapped to Frame-typed variables // Big Allocation: all expressions are eval'd in a context where a large temp // is available, and all allocations are compatible with that temp. Linear- // logic style execution is guaranteed inside the big temp. Parse error if // an expression which is not provably small does not have an active temp. // Grammar: // statements := cxexpr ; statements // cxexpr := // COMPLEX expr // infix_expr // Simple RHS-expr // id = cxexpr // Shadows outer var with a ptr assignment; no copy // // Overwrites inner var; types must match. // id <- cxexpr // Alternative R syntax for assignment // id[] = cxexpr // Slice/partial assignment; id already exists // iexpr ? cxexpr : cxexpr // exprs must have equal types // infix_expr := // Leading infix expression // op1 infix_expr term* // +x but also e.g. ++--!+-!-++!3 // op1? slice term* // e.g. cos() or -sin(foo) or -+-fun()[1,2] // term : = // Infix expression // op2 infix_expr // Standard R operator prec ordering // slice := // prefix_expr // No slicing // prefix_expr[] // Whole slice // prefix_expr[cxexpr?,cxexpr?] // optional row & col slicing // prefix_expr$col // named column // prefix_expr := // val // val(cxexpr,...)* // Prefix function application, evals LEFT TO RIGHT // val := // ( cxexpr ) // Ordering evaluation // id // any visible var; will be typed // num // Scalars, treated as 1x1 // op // Built-in functions // function(v0,v1,v2) { statements; ...v0,v1,v2... 
} // 1st-class lexically scoped functions // function(v0,v1,v2) statement // Single statement variant // op1 := + - ! // Unary operators allowed w/out parens prefix location // op2 := + - * / % & | <= > >= != ... // Binary operators allowed w/out parens infix location // op := sgn sin cos nrow ncol isNA sqrt isTRUE year month day ... // op := min max sum sdev mean ... // op := c cbind seq quantile table ... // Various R operators public static Env exec( String str ) throws IllegalArgumentException { cluster_init(); // Preload the global environment from existing Frames ArrayList<ASTId> global = new ArrayList<ASTId>(); ArrayList<Key> locked = new ArrayList<Key> (); Env env = new Env(locked); final Key [] frameKeys = H2O.KeySnapshot.globalSnapshot().filter(new H2O.KVFilter() { @Override public boolean filter(H2O.KeyInfo k) { return k._type == TypeMap.FRAME; } }).keys(); for( Key k : frameKeys ) { // Convert all VAs to Frames Value val = DKV.get(k); if( val == null || !val.isFrame()) continue; // Bad if it's already locked by 'null', because lock by 'null' is removed when you leave Exec. // Before was adding all frames with read-shared lock here. // Should be illegal to add any keys locked by "null' to exec? (is it only unparsed keys?) // undoing. 
this doesn't always work (gets stack trace) Frame fr = val.get(); String kstr = k.toString(); try { env.push(fr,kstr); global.add(new ASTId(Type.ARY,kstr,0,global.size())); fr.read_lock(null); locked.add(fr._key); } catch( Exception e ) { Log.err("Exception while adding frame "+k+" to Exec env"); } } // Some global constants global.add(new ASTId(Type.DBL,"TRUE",0,global.size())); env.push(1.0); global.add(new ASTId(Type.DBL,"FALSE",0,global.size())); env.push(0.0); global.add(new ASTId(Type.DBL,"T",0,global.size())); env.push(1.0); global.add(new ASTId(Type.DBL,"F",0,global.size())); env.push(0.0); global.add(new ASTId(Type.DBL,"NA",0,global.size())); env.push(Double.NaN); global.add(new ASTId(Type.DBL,"Inf",0,global.size())); env.push(Double.POSITIVE_INFINITY); // Parse. Type-errors get caught here and throw IAE try { int argcnt = global.size(); Exec2 ex = new Exec2(str, global); AST ast = ex.parse(); env.push(global.size()-argcnt); // Push space for temps ast.exec(env); env.postWrite(); } catch( RuntimeException t ) { env.remove_and_unlock(); throw t; } return env; } // Simple parser state final String _str; final char _buf[]; // Chars from the string int _x; // Parse pointer Stack<ArrayList<ASTId>> _env; private Exec2( String str, ArrayList<ASTId> global ) { _str = str; _buf = str.toCharArray(); _env = new Stack<ArrayList<ASTId>>(); _env.push(global); } int lexical_depth() { return _env.size()-1; } AST parse() { AST ast = ASTStatement.parse(this); skipWS(); // No trailing crud return _x == _buf.length ? ast : throwErr("Junk at end of line",_buf.length-1); } // -------------------------------------------------------------------------- // Generic parsing functions // -------------------------------------------------------------------------- void skipWS() { skipWS(false); } void skipWS( boolean EOS) { while( _x < _buf.length && isWS(_buf[_x]) && (!EOS || _buf[_x]!='\n') ) _x++; } // Skip whitespace. 
// If c is the next char, eat it & return true // Else return false. boolean peek(char c) { return peek(c,false); } // Peek for 'c' past whitespace but not past a newline if EOS is set // (basically treat newline as the statement-end character ';' which does not // match c) boolean peek(char c, boolean EOS) { char d; while( _x < _buf.length && isWS(_buf[_x]) && _buf[_x]!='\n' ) _x++; int nx=_x; if( !EOS ) while( nx < _buf.length && isWS(_buf[nx]) ) nx++; if( nx==_buf.length || _buf[nx]!=c ) return false; _x=nx+1; return true; } // Same as peek, but throw if char not found. Always newlines are treated as whitespace AST xpeek(char c, int x, AST ast) { return peek(c,false) ? ast : throwErr("Missing '"+c+"'",x); } // True if end-of-statement (';' or '\n' or no-more-data) boolean peekEOS() { while( _x < _buf.length ) { char d = _buf[_x++]; if( d==';' || d=='\n' ) return true; if( !isWS(d) ) { _x--; return false; } } return false; } static boolean isDigit(char c) { return c>='0' && c<= '9'; } static boolean isWS(char c) { return c<=' '; } static boolean isReserved(char c) { return c=='(' || c==')' || c=='[' || c==']' || c==',' || c==':' || c==';' || c=='$'; } static boolean isLetter(char c) { return (c>='a'&&c<='z') || (c>='A' && c<='Z') || c=='_'; } static boolean isLetter2(char c) { return c=='.' || c==':' || c=='\\' || isDigit(c) || isLetter(c); } static boolean isQuote(char c) { return c=='"' || c=='\''; } // Return an ID string, or null if we get weird stuff or numbers. Valid IDs // include all the operators, except parens (function application) and assignment. // Valid IDs: + - <= > ! 
[ ] joe123 ABC // Invalid : +++ 0joe ( = ) 123.45 1e3 String isID() { if( _x>=_buf.length ) return null; // No characters to parse char c = _buf[_x]; // Fail on special chars in the grammar if( isReserved(c) && c != ':') return null; if (c == ':') { if (!isDigit(_buf[_x+1])) return null; } // Fail on leading numeric if( isDigit(c) ) return null; if (c == '^' && _buf[_x+1] == '-') return _str.substring(++_x -1, _x); _x++; // Accept parse of 1 char // If first char is letter, standard ID if( isLetter(c) ) { int x=_x-1; // start of ID while( _x < _buf.length && isLetter2(_buf[_x]) ) _x++; return _str.substring(x,_x); } // Check for super-special operators that are three chars of the form %*%. // These are calls to R's matrix operators. if( _x+2 <= _buf.length && c == '%' && _buf[_x+1] == '%' ) { if( _buf[_x] == '*' ) { _x+=2; return "%*%"; } if (_buf[_x] == '/' ) { _x+=2; return "%/%"; } } if (_x+2 <= _buf.length && c == '%' && _buf[_x] == '%') { _x++; return "%%"; } // If first char is special, accept 1 or 2 special chars. // i.e. allow != >= == <= but not = alone if( _x>=_buf.length ) return _str.substring(_x-1,_x); char c2=_buf[_x]; if( isDigit(c2) || isLetter(c2) || isWS(c2) || isReserved(c2) ) { if( c=='=' ) { _x--; return null; } // Equals alone is not an ID return _str.substring(_x-1,_x); } if( c=='<' && c2=='-' ) { _x--; return null; } // The other assignment operator // Must accept as single letters to avoid ambiguity if( c=='+' || c=='-' || c=='*' || c=='/' ) return _str.substring(_x-1,_x); // One letter look ahead to decide on what to accept if( c=='=' || c=='!' || c=='<' || c =='>' ) if ( c2 =='=' ) return _str.substring(++_x-2,_x); else return _str.substring(_x-1,_x); _x++; // Else accept e.g. <= >= ++ != == etc... 
return _str.substring(_x-2,_x); } String isString() { // returns string value without enclosing quotes if( _x>=_buf.length ) return null; // No characters to parse char c = _buf[_x]; if( isQuote(c) ) { int x=_x+1; while( x < _buf.length && _buf[x] != c )x++; return _str.substring(_x+1,x); } return null; } // isID specifically does not parse "=" or "<-". This guy does. boolean isAssign(boolean EOS) { if( peek('<',EOS) ) { if( _buf[_x]=='-' ) { _x++; return true; } else return false; } if( !peek('=',EOS) ) return false; char c = _buf[_x]; if( c != '=' ) return true; // what valid 2-letter tokens start with "="? == but not =+ => _x--; return false; } // Yet-to-be-parsed private String debug() { return new String(_buf,_x,_buf.length-_x); } // -------------------------------------------------------------------------- // Nicely report a syntax error AST throwErr( String msg, int idx ) { int lo = _x, hi=idx; if( idx < _x ) { lo = idx; hi=_x; } String s = msg+ '\n'+_str+'\n'; int i; for( i=0; i<lo; i++ ) s+= ' '; s+='^'; i++; for( ; i<hi; i++ ) s+= '-'; if( i<=hi ) s+= '^'; s += '\n'; throw new IllegalArgumentException(s); } // To avoid a class-circularity hang, we need to force other members of the // cluster to load the Exec & AST classes BEFORE trying to execute code // remotely, because e.g. ddply runs functions on all nodes. private static boolean _inited; // One-shot init private static void cluster_init() { if( _inited ) return; new DRemoteTask() { @Override public void lcompute() { new ASTPlus(); // Touch a common class to force loading tryComplete(); } @Override public void reduce( DRemoteTask dt ) { } }.invokeOnAllNodes(); _inited = true; } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/exec/Flow.java
package water.exec; import water.*; import water.fvec.*; import water.util.Utils.*; public abstract class Flow extends Iced { // Flow-Coding with Frames, Filters & GroupBy // // frame.with(filter).with(groupby).with(reducer).doit(); // // Define a pipeline of operations to perform on a frame. Can use any number // of filters, and one optional groupby, and one required reducer which must // be last in the pipe. The return result is either the instance of the // reducer, or a collection of reducers (one per Group in the GroupBy). // // All elements are passed a row from the Frame in double ds[]. // // Reducer uses "mapreduce(ds)" to collect data for one row into a reducer // object, and "this.reduce(that)" to gather chunks of reduced objects. Rows // are reduced in parallel and in any order; each row exactly once. // // Filter returns boolean to ignore or reduce the row. // // GroupBy returns a long to specify which Group this row belongs too. The // row is reduced into a seperate reducer for each group. The group ids do // not have to form a dense space, any unique long value specifies a group; a // hash table is used to gather the groups. The hashtable of reduced values // is returned. // //Frame fr = parseFrame("cars.hex", "smalldata/cars.csv"); //final int cyl_idx = fr.find("cylinders"); //final int year_idx = fr.find("year"); // //SumCol sumcols = fr. // with(new SumCol(year_idx)). // doit(); //System.out.println(sumcols._sum+"/"+sumcols._n+" = "+(sumcols._sum/sumcols._n)); // //SumCol sumcols1 = fr. // with(new Filter() { boolean filter(double ds[]) { return ds[cyl_idx]!=5; } }). // with(new SumCol(year_idx)). // doit(); //System.out.println(sumcols1._sum+"/"+sumcols1._n+" = "+(sumcols1._sum/sumcols1._n)); // //IcedHashMap<IcedLong,SumCol> sumcols2 = fr. // with(new GroupBy() { long groupId(double ds[]) { return (long)ds[cyl_idx];} }). // with(new SumCol(year_idx)). 
// doit(); //for( IcedLong gid : sumcols2.keySet() ) { // SumCol sumcol = sumcols2.get(gid); // System.out.println("Cyl="+gid._val+", "+sumcol._sum+"/"+sumcol._n+" = "+(sumcol._sum/sumcol._n)); //} // ----------------------- // THE PUBLIC API: public abstract static class PerRow<X extends PerRow> extends Iced { abstract public void mapreduce( double ds[] ); abstract public void reduce( X that ); abstract public X make(); @Override public String toString() { return "perRow"; } } public abstract static class Filter extends Iced { abstract public boolean filter( double ds[] ); @Override public String toString() { return "filter"; } } public abstract static class GroupBy extends Iced { abstract public long groupId( double ds[]); @Override public String toString() { return "groupBy"; } } // ----------------------- abstract Frame frame(); abstract <X extends PerRow<X>> // Type parameter PerRow<X> // Return type of doit() doit // Method name ( PerRow<X> pr, double ds[], PerRow<X> pr0 ); // Arguments for doit() public static class FlowFrame extends Flow { final Frame _fr; public FlowFrame( Frame fr ) { _fr = fr; } @Override Frame frame() { return _fr; } @Override public String toString() { return _fr.toString(); } @Override <X extends PerRow<X>> PerRow<X> doit(PerRow<X> pr, double ds[], PerRow<X> pr0) { if( pr == null ) pr = pr0.make(); pr.mapreduce(ds); return pr; } } public static class FlowFilter extends Flow { final Filter _fr; final Flow _ex; public FlowFilter( Filter fr, Flow ex ) { _fr = fr; _ex = ex;} public <Y extends PerRow<Y>> FlowPerRow<Y> with( PerRow<Y> pr ) { return new FlowPerRow<Y>(pr,this); } public FlowGroupBy with( GroupBy fr ) { return new FlowGroupBy(fr,this); } public FlowFilter with ( Filter filter){ return new FlowFilter(filter, this); } @Override Frame frame() { return _ex.frame(); } @Override public String toString() { return _ex.toString()+".with("+_fr+")"; } @Override <X extends PerRow<X>> PerRow<X> doit(PerRow<X> pr, double ds[], PerRow<X> 
pr0) { return _fr.filter(ds) ? _ex.doit(pr,ds,pr0) : pr; } } public static class FlowGroupBy extends Flow { final GroupBy _gb; final Flow _ex; public FlowGroupBy( GroupBy gb, Flow ex ) { _gb = gb; _ex = ex;} public <Y extends PerRow<Y>> FlowGroupPerRow<Y> with( PerRow<Y> pr ) { return new FlowGroupPerRow<Y>(pr,this); } public FlowGroupBy with( Filter fr ) { return new FlowGroupBy(_gb,new FlowFilter(fr,_ex)); } @Override Frame frame() { return _ex.frame(); } @Override public String toString() { return _ex.toString()+".with("+_gb+")"; } @Override <X extends PerRow<X>> PerRow<X> doit(PerRow<X> pr, double ds[], PerRow<X> pr0) { throw H2O.fail(); } } public static class FlowGroupPerRow<X extends PerRow<X>> extends MRTask2<FlowGroupPerRow<X>> { final PerRow<X> _pr; // Canonical example, not returned IcedHashMap<IcedLong,PerRow<X>> _prs; FlowGroupBy _ex; public FlowGroupPerRow( PerRow<X> pr, FlowGroupBy ex ) { _pr = pr; _ex = ex; } public IcedHashMap<IcedLong,X> doit() { return doAll(_ex.frame()).self(); } @Override public void map( Chunk chks[] ) { _prs = new IcedHashMap<IcedLong,PerRow<X>>(); double ds[] = new double[chks.length]; for( int i=0; i<chks[0]._len; i++ ) { // Load the internal double array for( int j=0; j<chks.length; j++ ) ds[j] = chks[j].at0(i); IcedLong gid = new IcedLong(_ex._gb.groupId(ds)); PerRow<X> pr1 = _prs.get(gid); PerRow<X> pr2 = _ex._ex.doit(pr1,ds,_pr); if( pr1 == null && pr2 != null ) _prs.put(gid,pr2); } } @Override public void reduce( FlowGroupPerRow<X> that ) { for( IcedLong gid : that._prs.keySet() ) { PerRow<X> that_pr = that._prs.get(gid); PerRow<X> this_pr = this._prs.get(gid); if( that_pr != null ) { if( this_pr != null ) this_pr.reduce((X)that_pr); else this._prs.put(gid,that_pr); } } } IcedHashMap<IcedLong,X> self() { return (IcedHashMap<IcedLong,X>)_prs; } @Override public String toString() { return _ex.toString()+".with("+_pr+")"; } } public static class FlowPerRow<X extends PerRow<X>> extends MRTask2<FlowPerRow<X>> { PerRow<X> 
_pr; Flow _ex; public FlowPerRow( PerRow<X> pr, Flow ex ) { _pr = pr; _ex = ex;} public X doit() { return doAll(_ex.frame()).self(); } @Override public void map( Chunk chks[] ) { _pr = _pr.make(); double ds[] = new double[chks.length]; for( int i=0; i<chks[0]._len; i++ ) { // Load the internal double array for( int j=0; j<chks.length; j++ ) ds[j] = chks[j].at0(i); _ex.doit(_pr,ds,_pr); } } @Override public void reduce( FlowPerRow<X> ebpr ) { _pr.reduce(ebpr.self()); } X self() { return (X)_pr; } @Override public String toString() { return _ex.toString()+".with("+_pr+")"; } } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/exec/Type.java
package water.exec; import java.util.Arrays; import water.Iced;
/** Typing system for a generic R-like parser.
 *  Supports Hindley-Milner style type inference: types start UNBOUND and are
 *  progressively constrained via Tarjan union-find ({@link #find}/{@link #union}).
 *  @author cliffc@0xdata.com */
// --------------------------------------------------------------------------
public class Type extends Iced {
  // Type-tag constants stored in _t.
  final static private int UNBOUND= 0;   // Not yet constrained; _ts[0] is the union-find slot
  final static private int BOUND  = 1;   // to _ts[0]
  final static private int DBL0   = 2;   // Scalar double
  final static private int ARY0   = 3;   // Array/frame
  final static private int FCN0   = 4;   // Return type in _ts[0], args in _ts[1...];
  final static private int DBLARY0= 5;   // Type is either DBL or ARY but not FCN
  final static private int ANYARY0= 6;   // Type is ARY if any ts[] is an ARY, else DBL
  final static private int STR0   = 7;   // String
  final static private int VARARGS=32;   // OR'd onto last type in a fcn, allows zero or more of this type
  int _t;                       // One of the above #s (possibly OR'd with VARARGS)
  static private int UNIQUE;    // Unique ID handy for debugging
  final int _x = UNIQUE++;      // Point in program where type is 1st defined.
  Type[] _ts;                   // null==prim, else fcn and _ts[0] is return, _ts[1+...] are arg types

  // Basic constructor; asserts VARARGS flags are only on the last fcn arg.
  Type( int t, Type[] ts ) { assert varargs_clean(t,ts); _t=t; _ts=ts; }
  // Same, but marks the resulting type as VARARGS (the float arg is a ctor-overload discriminator only).
  Type( int t, Type[] ts, float f ) { this(t,ts); _t|=VARARGS;}

  /** Deep copy, preserving the VARARGS flag on this level. */
  Type copy() {
    Type[] ts = null;
    if (_ts!=null) {
      ts =_ts.clone();
      for (int i = 0; i < ts.length; i++)
        if (_ts[i]!=null) ts[i] = _ts[i].copy();
    }
    int vararg = _t&VARARGS;
    Type copy = new Type(_t&~VARARGS,ts);
    copy._t |= vararg;
    return copy;
  }

  // Check no varargs flags, except on the last type of functions
  private boolean varargs_clean( int t, Type ts[] ) {
    if( (t&VARARGS)!=0 ) return false; // Need to clean this upfront
    if( t!=FCN0 || ts==null ) return true;
    for( int i=0; i<ts.length-1; i++ )
      if( ts[i] != null && (ts[i]._t&VARARGS)!=0 ) return false;
    return true;
  }

  // Make some base types.  NOTE(review): these are shared mutable singletons;
  // union() binds via _ts which is null here, so they act as terminal types.
  static Type DBL = new Type(DBL0,null);
  static Type ARY = new Type(ARY0,null);
  static Type STR = new Type(STR0, null);
  public static Type unbound() { return new Type(UNBOUND,new Type[1]); }
  public static Type fcn(Type[] ts) { return new Type(FCN0,ts); }
  public static Type varargs(Type t) { return new Type(t._t,t._ts,1f);}
  public static Type dblary() { return new Type(DBLARY0,new Type[1]); }
  public static Type anyary(Type ts[]) { return new Type(ANYARY0,ts); }

  // Tarjan Union-Find: follow BOUND links (with path compression into _ts[0]),
  // then collapse ANYARY unions.
  Type find() {
    Type t = this;
    if( _t==BOUND ) t=_ts[0]=_ts[0].find();
    if( t._t!=ANYARY0 ) return t;
    return t.findAnyAry();
  }

  // "anyary" was my 1st attempt at a Union-Type.  It's not going to work so
  // easily.  Need back-ptrs from the component types to the different
  // union-type flavors.  Then when union'ing a component, I can visit types
  // constructed from the component & union them also as needed.  For IfElse, I
  // need the True & False types, the Test type and the Result type.  These
  // combo's are legal, and all others illegal:
  //  rez  tst  T    F
  //  D    D    D    D
  //  A    A    D    A
  //  A    A    A    D
  //  A    A    A    A
  //  A    D    A    A
  //  F    D    F    F    // and all Fcns are union'd
  //
  //  DA   DA   D    DA   // a single Dbl is not constraining
  //  DA   DA   DA   D
  //  DA   D    DA   DA
  //  A    DA   A    A
  //  A    DA   A    DA   // Any array means the result is ary
  //  A    DA   DA   A
  //  A    A    DA   DA   // weird: at least one of DA must be an A
  //  A    DA   DA   DA
  //
  //  DA   DA   DA   DA   // no functions
  //
  //  U    D    U    U    // could be all Fcns or any other mix
  //  U    DA   U    U    // Most general allowed type for IfElse

  // Drop DBL's, drop dups
  // If any are ARY, can only be ARY or fail
  // If FCNs, all must be equal
  // Return any singular type.
  private Type findAnyAry() {
    int len=0; Type fun=null;
    for( int i=0; i<_ts.length; i++ ) {
      Type t = _ts[i].find();
      // All function components are union'd into a single representative.
      if( t._t == FCN0 && fun != null ) { t.union(fun); t=fun=t.find(); }
      else {
        if( t._t == FCN0 ) fun = t;
        if( t._t != DBL0 && t._t != STR0 && // Keep non-DBL & non-STR
            !dupType(len,t) )               // But remove dups
          _ts[len++] = t;                   // Compact survivors in place
      }
    }
    // No more types?  Defaults to DBL
    if( len == 0 ) { _t=BOUND; return (_ts[0] = DBL); }
    // Single variant type?  Defaults to that type
    if( len == 1 ) { _t=BOUND; return _ts[0]; }
    if( len < _ts.length ) _ts = Arrays.copyOf(_ts, len);
    return this;
  }

  // True if t is reference-identical to one of the first len compacted _ts entries.
  private boolean dupType( int len, Type t ) {
    for( int j=0; j<len; j++ ) if( _ts[j]==t ) return true;
    return false;
  }

  /** Unify this type with t.  Returns true if the two types are compatible
   *  (possibly binding unbound/dblary variables as a side effect), false if
   *  they conflict.  Structural recursion over function arg lists, with
   *  special handling for a trailing VARARGS arg. */
  boolean union( Type t ) {
    Type ta= find(); Type tb=t.find();
    int tta = ta._t&(VARARGS-1); // Strip off varargs
    int ttb = tb._t&(VARARGS-1); // Strip off varargs
    if( ta==tb ) return true;
    else if( (tta== FCN0   && ttb== FCN0  ) || // Functions are equal?
             (tta==ANYARY0 && ttb==ANYARY0) ) { // AnyArys are equal?
      // Structural breakdown of function-type equality.
      // Made more complex by allowing varargs types.
      Type t0 = ta, t1 = tb;    // Shorter type in t0
      if( ta._ts.length>tb._ts.length ) { t0=tb; t1=ta; }
      // Walk the shorter list, checking types
      boolean ok=true;
      int len=t0._ts.length;
      Type varargs=null;        // Extra args in T1 can only be matched with a varargs repeat from T0
      if( len < t1._ts.length ) {
        varargs = t0._ts[len-1].find();
        if( (varargs._t&VARARGS)!=0 ) len--; // Dont match the varargs arg in 1st loop
        else varargs=null;                   // Else not a varargs
      }
      for( int i=0; i<len; i++ )          // Match all args
        if( !t0._ts[i].union(t1._ts[i]) ) ok = false; // Subtypes are unequal
      if( len == t1._ts.length ) return ok;
      if( len == t1._ts.length-1 && (t1._ts[len].find()._t&VARARGS) != 0 )
        return true; // Also ok for a zero-length varargs in t1, and no arg in t0
      if( varargs==null ) return false;
      // Must be varargs: match each extra t1 arg against a fresh instance of the repeated type.
      for( int i=len; i<t1._ts.length; i++ ) {
        int tvar = (varargs._t&(VARARGS-1));
        Type var = tvar==DBLARY0 ? dblary() : (tvar==UNBOUND ? unbound() : varargs); // Use a new unbound type
        if( !var.union(t1._ts[i]) ) ok = false; // Subtypes are unequal
      }
      return ok;
    }
    // Bind unbound (or compatible dblary) variables to the other side.
    else if( tta==UNBOUND || (tta==DBLARY0 && tb.isDblAry()) ) { ta._t=BOUND; ta._ts[0]= tb; }
    else if( ttb==UNBOUND || (ttb==DBLARY0 && ta.isDblAry()) ) { tb._t=BOUND; tb._ts[0]= ta; }
    else if( tta==DBLARY0 && ttb==DBLARY0 ) { ta._t=BOUND; ta._ts[0]=tb; }
    else if( tta==ANYARY0 && ttb==DBLARY0 ) throw water.H2O.unimpl(); // ???
    else if( tta==ANYARY0 && ttb==ARY0 ) throw water.H2O.unimpl();    // ?one of many must be an array?
    else if( tta==ANYARY0 && ttb==DBL0 ) { // Force all to DBL
      boolean ok=true;
      for( Type t2 : ta._ts ) ok |= !Type.DBL.union(t2); // NOTE(review): |= of negation looks suspicious; verify intent
      return ok;
    }
    else if( ttb==ANYARY0 ) throw water.H2O.unimpl();
    else if( tta==ttb ) return true;    // Equal after varargs stripping
    else return false;                  // Types are unequal
    return true;                        // Reached after a successful binding above
  }

  // If clearly not a function.  False for unbound variables, which might
  // become "not a function" later.
  boolean isUnbound(){ Type t=find(); return t._t==UNBOUND; }
  boolean isAry()    { Type t=find(); return t._t==ARY0; }
  boolean isDbl()    { Type t=find(); return t._t==DBL0; }
  boolean isFcn()    { Type t=find(); return t._t==FCN0; }
  boolean isNotFun() { Type t=find(); return t._t==DBL0 || t._t==ARY0 || t._t==DBLARY0 || t._t==STR0; }
  boolean isDblAry() { Type t=find(); return t._t==DBL0 || t._t==ARY0; }
  boolean isStr()    { Type t=find(); return t._t==STR0; }

  /** Return type of functions; asserts this resolves to a function type. */
  public Type ret() { Type t=find(); assert t._t == FCN0; return t._ts[0].find(); }

  @Override public String toString() {
    String s=null;
    switch( _t&(VARARGS-1) ) {
    case UNBOUND: s = "@"+_x; break;           // Unresolved var, shown by creation id
    case BOUND:   s = _ts[0].toString(); break; // Delegate to bound target
    case DBL0:    s = "dbl"; break;
    case ARY0:    s = "ary"; break;
    case DBLARY0: s = "dblary"; break;
    case STR0:    s = "str"; break;
    case ANYARY0: {
      s = "anyary{";
      for( Type t : _ts ) s += t+",";
      s += "}";
      break;
    }
    case FCN0: {                // ret(arg1,arg2,...)
      s = _ts[0]+"(";
      for( int i=1; i<_ts.length-1; i++ ) s += _ts[i]+",";
      if( _ts.length > 1 ) s += _ts[_ts.length-1];
      s += ")";
      break;
    }
    default: throw water.H2O.unimpl();
    }
    if( (_t&VARARGS)!=0 ) s += "...";
    return s;
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/fvec/AppendableVec.java
package water.fvec; import water.*; import water.util.Utils; import java.util.Arrays;
/**
 * A NEW single distributed vector column.
 *
 * The NEW vector has no data, and takes no space.  It supports distributed
 * parallel writes to it, via calls to append2.  Such writes happen in parallel
 * and all writes are ordered.  Writes *will* be local to the node doing them,
 * specifically to allow control over locality.  By default, writes will go
 * local-homed chunks with no compression; there is a final 'close' to the NEW
 * vector which may do compression; the final 'close' will return some other
 * Vec type.  NEW Vectors do NOT support reads!
 */
public class AppendableVec extends Vec {
  long _espc[];                 // Elements-per-chunk, indexed by chunk#; grown on demand
  // Chunk-content type bits, OR'd together per chunk in _chunkTypes.
  public static final byte NA     = 1;
  public static final byte ENUM   = 2;
  public static final byte NUMBER = 4;
  public static final byte TIME   = 8;
  public static final byte UUID   =16;
  byte [] _chunkTypes;          // Per-chunk OR of the type bits above
  long _naCnt;                  // Total NA count across closed chunks
  long _strCnt;                 // Total string/enum token count
  final long _timCnt[] = new long[ParseTime.TIME_PARSE.length]; // Per-time-format hit counts
  long _totalCnt;               // Total row count across closed chunks

  public AppendableVec( Key key) {
    super(key, (long[])null);
    _espc = new long[4];        // Start small; doubled in closeChunk as needed
    _chunkTypes = new byte[4];
  }

  // A NewVector chunk was "closed" - completed.  Add it's info to the roll-up.
  // This call is made in parallel across all node-local created chunks, but is
  // not called distributed.
  synchronized void closeChunk( NewChunk chk) {
    final int cidx = chk._cidx;
    while( cidx >= _espc.length ) { // Grow both parallel arrays by doubling
      _espc = Arrays.copyOf(_espc,_espc.length<<1);
      _chunkTypes = Arrays.copyOf(_chunkTypes,_chunkTypes.length<<1);
    }
    _espc[cidx] = chk._len;
    _chunkTypes[cidx] = chk.type();
    _naCnt += chk._naCnt;
    _strCnt += chk._strCnt;
    for( int i=0; i<_timCnt.length; i++ ) _timCnt[i] += chk._timCnt[i];
    _totalCnt += chk._len;
  }

  // What kind of data did we find?  NA's?  Strings-only?  Floats or Ints?
  boolean shouldBeEnum() {
    // We declare column to be string/enum only if it does not have ANY numbers in it.
    return _strCnt > 0 && (_strCnt + _naCnt) == _totalCnt;
  }

  // Class 'reduce' call on new vectors; to combine the roll-up info.
  // Called single-threaded from the M/R framework.
  public void reduce( AppendableVec nv ) {
    if( this == nv ) return;    // Trivially done
    // Combine arrays of elements-per-chunk
    long e1[] = nv._espc;       // Shorter array of longs?
    byte t1[] = nv._chunkTypes;
    if( e1.length > _espc.length ) {
      e1 = _espc;               // Keep the shorter one in e1
      t1 = _chunkTypes;
      _espc = nv._espc;         // Keep longer in the object
      _chunkTypes = nv._chunkTypes;
    }
    for( int i=0; i<e1.length; i++ ){ // Copy non-zero elements over
      assert _chunkTypes[i] == 0 || t1[i] == 0; // Each chunk closed on exactly one side
      if( e1[i] != 0 && _espc[i]==0 ) _espc[i] = e1[i];
      _chunkTypes[i] |= t1[i];
    }
    _naCnt += nv._naCnt;
    _strCnt += nv._strCnt;
    Utils.add(_timCnt,nv._timCnt);
    _totalCnt += nv._totalCnt;
  }

  // "Close" out a NEW vector - rewrite it to a plain Vec that supports random
  // reads, plus computes rows-per-chunk, min/max/mean, etc.
  public Vec close(Futures fs) {
    // Compute #chunks: trim trailing empty chunks, removing any stale DKV keys.
    int nchunk = _espc.length;
    DKV.remove(chunkKey(nchunk),fs); // remove potential trailing key
    while( nchunk > 0 && _espc[nchunk-1] == 0 ) {
      nchunk--;
      DKV.remove(chunkKey(nchunk),fs); // remove potential trailing key
    }
    // Roll up which content kinds appear anywhere in the column.
    boolean hasNumber = false, hasEnum = false, hasTime=false, hasUUID=false;
    for( int i = 0; i < nchunk; ++i ) {
      if( (_chunkTypes[i] & TIME  ) != 0 ) { hasNumber = true; hasTime=true; }
      if( (_chunkTypes[i] & NUMBER) != 0 ) hasNumber = true;
      if( (_chunkTypes[i] & ENUM  ) != 0 ) hasEnum = true;
      if( (_chunkTypes[i] & UUID  ) != 0 ) hasUUID = true;
    }
    // number wins, we need to go through the enum chunks and declare them all
    // NAs (chunk is considered enum iff it has only enums + possibly some nas)
    if( hasNumber && hasEnum ) {
      for(int i = 0; i < nchunk; ++i)
        if(_chunkTypes[i] == ENUM)
          DKV.put(chunkKey(i), new C0DChunk(Double.NaN, (int)_espc[i]),fs);
    }
    // UUID wins over enum & number: every non-UUID chunk becomes all-NA.
    if( hasUUID && (hasEnum || hasNumber) ) {
      hasEnum=hasNumber=false;
      for(int i = 0; i < nchunk; ++i)
        if((_chunkTypes[i] & UUID)==0)
          DKV.put(chunkKey(i), new C0DChunk(Double.NaN, (int)_espc[i]),fs);
    }
    // Make sure time is consistent
    int t = -1;                 // -1: no time; >=0: the single common format; -2: inconsistent
    if( hasTime ) {
      // Find common time parse, and all zeros - or inconsistent time parse
      for( int i=0; i<_timCnt.length; i++ )
        if( _timCnt[i] != 0 )
          if( t== -1 ) t=i;     // common time parse
          else t = -2;          // inconsistent parse
      if( t < 0 )               // blow off time parse
        for(int i = 0; i < nchunk; ++i)
          if(_chunkTypes[i] == TIME)
            DKV.put(chunkKey(i), new C0DChunk(Double.NaN, (int)_espc[i]),fs);
    }
    assert t<0 || _domain == null; // A time column cannot also carry an enum domain
    // Compute elems-per-chunk.
    // Roll-up elem counts, so espc[i] is the starting element# of chunk i.
    long espc[] = new long[nchunk+1]; // Shorter array
    long x=0;                   // Total row count so far
    for( int i=0; i<nchunk; i++ ) {
      espc[i] = x;              // Start elem# for chunk i
      x += _espc[i];            // Raise total elem count
    }
    espc[nchunk]=x;             // Total element count in last
    // Replacement plain Vec for AppendableVec.
    Vec vec = new Vec(_key, espc, _domain, hasUUID, (byte)t);
    DKV.put(_key,vec,fs);       // Inject the header
    return vec;
  }

  // Default read/write behavior for AppendableVecs: write-only.
  @Override public boolean readable() { return false; }
  @Override public boolean writable() { return true ; }
  @Override public Chunk chunkForChunkIdx(int cidx) { return new NewChunk(this,cidx); }
  // None of these are supposed to be called while building the new vector
  @Override public Value chunkIdx( int cidx ) { throw H2O.fail(); }
  @Override public long length() { throw H2O.fail(); }
  @Override public int nChunks() { throw H2O.fail(); }
  @Override int elem2ChunkIdx( long i ) { throw H2O.fail(); }
  @Override public long chunk2StartElem( int cidx ) { throw H2O.fail(); }
  @Override public long byteSize() { return 0; }
  @Override public String toString() { return "[AppendableVec, unknown size]"; }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/fvec/ByteVec.java
package water.fvec; import water.Job.ProgressMonitor; import water.Key; import java.io.IOException; import java.io.InputStream;
/**
 * A vector of plain Bytes.
 */
public class ByteVec extends Vec {
  ByteVec( Key key, long espc[] ) { super(key,espc); }

  // Byte vectors are always backed by raw C1NChunks.
  public C1NChunk chunkForChunkIdx(int cidx) { return (C1NChunk)super.chunkForChunkIdx(cidx); }

  /** Open a stream view over the underlying data.
   *  Reads chunks sequentially, reporting progress to pmon (if non-null) as
   *  each chunk is loaded.  The stream is not thread-safe. */
  public InputStream openStream(final ProgressMonitor pmon) {
    return new InputStream() {
      final long [] sz = new long[1]; // Bytes consumed from fully-read chunks
      private int _cidx, _sz;         // Next chunk index; read cursor within current chunk
      private C1NChunk _c0;           // Current chunk, or null before first read
      @Override public int available() throws IOException {
        // Advance to the next chunk when the current one is exhausted.
        if( _c0 == null || _sz >= _c0._len ) {
          sz[0] += _c0 != null?_c0._len:0;
          if(_cidx >= nChunks() )return 0; // End of vector
          _c0 = chunkForChunkIdx(_cidx++);
          _sz = C1NChunk.OFF;              // Skip chunk header bytes
          if( pmon != null ) pmon.update(_c0._len);
        }
        return _c0._len-_sz;
      }
      @Override public void close() { _cidx = nChunks(); _c0 = null; _sz = 0;}
      @Override public int read() throws IOException {
        // available() side-effects the chunk cursor forward as needed.
        return available() == 0 ? -1 : 0xFF&_c0._mem[_sz++];
      }
      @Override public int read(byte[] b, int off, int len) throws IOException {
        int sz = available();
        if( sz == 0 ) return -1;
        len = Math.min(len,sz); // Never read past the current chunk in one call
        System.arraycopy(_c0._mem,_sz,b,off,len);
        _sz += len;
        return len;
      }
    };
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/fvec/C0DChunk.java
package water.fvec; import water.AutoBuffer; import water.UDP;
/**
 * The constant 'double' column: every row holds the same double _con.
 * A NaN constant means the whole chunk is NA.
 */
public class C0DChunk extends Chunk {
  static final int OFF=8+4;     // _mem layout: 8 bytes constant + 4 bytes length
  double _con;                  // The single repeated value
  public C0DChunk(double con, int len) {
    _mem=new byte[OFF]; _start = -1; _len = len; _con = con;
    UDP.set8d(_mem,0,con);      // Serialize constant + length into _mem
    UDP.set4(_mem,8,len);
  }
  @Override protected final long at8_impl( int i ) {
    if( Double.isNaN(_con) ) throw new IllegalArgumentException("at8 but value is missing");
    return (long)_con;          // Possible silent truncation
  }
  @Override protected final double atd_impl( int i ) {return _con;}
  @Override protected final boolean isNA_impl( int i ) { return Double.isNaN(_con); }
  // In-place writes succeed only if they would not change the chunk's content.
  @Override boolean set_impl(int idx, long l) { return l==_con; }
  @Override boolean set_impl(int i, double d) { return d==_con; }
  @Override boolean set_impl(int i, float f ) { return f==_con; }
  @Override boolean setNA_impl(int i) { return Double.isNaN(_con); }
  @Override boolean hasFloat() { return (long)_con!=_con; } // True if constant is non-integral (or NaN)
  @Override public AutoBuffer write(AutoBuffer bb) { return bb.putA1(_mem,_mem.length); }
  @Override public C0DChunk read(AutoBuffer bb) {
    _mem = bb.bufClose(); _start = -1;
    _con = UDP.get8d(_mem,0);   // Restore fields from serialized _mem
    _len = UDP.get4(_mem,8);
    return this;
  }
  @Override public int sparseLen(){return _con == 0?0:_len;} // All-zero chunk is fully sparse
  @Override NewChunk inflate_impl(NewChunk nc) {
    nc.set_len(nc.set_sparseLen(0));
    if(_con == 0) { nc.addZeros(len()); }
    else { for (int i=0; i<len(); ++i) nc.addNum(_con); }
    return nc;
  }
  // 3.3333333e33
  public int pformat_len0() { return 22; }
  public String pformat0() { return "% 21.15e"; }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/fvec/C0LChunk.java
package water.fvec; import java.util.Arrays; import water.AutoBuffer; import water.UDP;
/**
 * The constant 'long' column: every row holds the same long _con.
 * Cannot represent NAs (setNA_impl always fails).
 */
public class C0LChunk extends Chunk {
  static final int OFF=8+4;     // _mem layout: 8 bytes constant + 4 bytes length
  long _con;                    // The single repeated value
  public C0LChunk(long con, int len) {
    _mem=new byte[OFF]; _start = -1; _len = len; _con = con;
    UDP.set8(_mem,0,con);       // Serialize constant + length into _mem
    UDP.set4(_mem,8,len);
  }
  @Override protected final long at8_impl( int i ) { return _con; }
  @Override protected final double atd_impl( int i ) {return _con; }
  @Override protected final boolean isNA_impl( int i ) { return false; }
  // In-place writes succeed only if they would not change the chunk's content.
  @Override boolean set_impl(int idx, long l) { return l==_con; }
  @Override boolean set_impl(int i, double d) { return d==_con; }
  @Override boolean set_impl(int i, float f ) { return f==_con; }
  @Override boolean setNA_impl(int i) { return false; }
  @Override boolean hasFloat() { return false; }
  @Override public AutoBuffer write(AutoBuffer bb) { return bb.putA1(_mem,_mem.length); }
  @Override public C0LChunk read(AutoBuffer bb) {
    _mem = bb.bufClose(); _start = -1;
    _con = UDP.get8(_mem,0);    // Restore fields from serialized _mem
    _len = UDP.get4(_mem,8);
    return this;
  }
  @Override NewChunk inflate_impl(NewChunk nc) {
    if(_con != 0) {             // Dense expansion: fill mantissas with the constant
      nc.alloc_mantissa(len());
      Arrays.fill(nc.mantissa(), _con);
      nc.alloc_exponent(len());
      nc.set_len(nc.set_sparseLen(len()));
    } else {                    // Zero constant: expand as a sparse all-zeros chunk
      nc.set_len(nc.set_sparseLen(0));
      nc.set_sparse(0);
      nc.addZeros(len());
      assert(nc.sparseLen() == 0);
    }
    return nc;
  }
  @Override public int sparseLen(){return _con == 0?0:_len;} // All-zero chunk is fully sparse
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/fvec/C16Chunk.java
package water.fvec; import water.*; /** * The empty-compression function, where data is in long-pairs for UUIDs */ public class C16Chunk extends Chunk { protected static final long _LO_NA = Long.MAX_VALUE; protected static final long _HI_NA = 0; C16Chunk( byte[] bs ) { _mem=bs; _start = -1; _len = _mem.length>>4; } @Override protected final long at8_impl( int i ) { throw new IllegalArgumentException("at8 but 16-byte UUID"); } @Override protected final double atd_impl( int i ) { throw new IllegalArgumentException("atd but 16-byte UUID"); } @Override protected final boolean isNA_impl( int i ) { return UDP.get8(_mem,(i<<4))==_LO_NA && UDP.get8(_mem,(i<<4)+8)==_HI_NA; } @Override protected long at16l_impl(int idx) { long lo = UDP.get8(_mem,(idx<<4) ); long hi = UDP.get8(_mem,(idx<<4)+8); if( lo==_LO_NA && hi==_HI_NA ) throw new IllegalArgumentException("at16 but value is missing"); return lo; } @Override protected long at16h_impl(int idx) { long lo = UDP.get8(_mem,(idx<<4) ); long hi = UDP.get8(_mem,(idx<<4)+8); if( lo==_LO_NA && hi==_HI_NA ) throw new IllegalArgumentException("at16 but value is missing"); return hi; } @Override boolean set_impl(int idx, long l) { return false; } @Override boolean set_impl(int i, double d) { return false; } @Override boolean set_impl(int i, float f ) { return false; } @Override boolean setNA_impl(int idx) { UDP.set8(_mem,(idx<<4),_LO_NA); UDP.set8(_mem,(idx<<4),_HI_NA); return true; } @Override boolean hasFloat() { return false; } @Override public AutoBuffer write(AutoBuffer bb) { return bb.putA1(_mem,_mem.length); } @Override public C16Chunk read(AutoBuffer bb) { _mem = bb.bufClose(); _start = -1; _len = _mem.length>>4; assert _mem.length == _len<<4; return this; } @Override NewChunk inflate_impl(NewChunk nc) { nc.set_len(nc.set_sparseLen(0)); for( int i=0; i< len(); i++ ) { long lo = UDP.get8(_mem,(i<<4) ); long hi = UDP.get8(_mem,(i << 4) + 8); nc.addUUID(lo, hi); } return nc; } public int pformat_len0() { return 36; } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/fvec/C1Chunk.java
package water.fvec; import water.*; /** * The empty-compression function, if all elements fit directly on UNSIGNED bytes. * Cannot store 0xFF, the value is a marker for N/A. */ public class C1Chunk extends Chunk { static final int OFF=0; static protected final long _NA = 0xFF; C1Chunk(byte[] bs) { _mem=bs; _start = -1; _len = _mem.length; } @Override protected final long at8_impl( int i ) { long res = 0xFF&_mem[i+OFF]; if( res == _NA ) throw new IllegalArgumentException("at8 but value is missing"); return res; } @Override protected final double atd_impl( int i ) { long res = 0xFF&_mem[i+OFF]; return (res == _NA)?Double.NaN:res; } @Override protected final boolean isNA_impl( int i ) { return (0xFF&_mem[i+OFF]) == _NA; } @Override boolean set_impl(int i, long l) { if( !(0 <= l && l < 255) ) return false; _mem[i+OFF] = (byte)l; return true; } @Override boolean set_impl(int i, double d) { return false; } @Override boolean set_impl(int i, float f ) { return false; } @Override boolean setNA_impl(int idx) { _mem[idx+OFF] = (byte)_NA; return true; } @Override boolean hasFloat() { return false; } @Override public AutoBuffer write(AutoBuffer bb) { return bb.putA1(_mem,_mem.length); } @Override public C1Chunk read(AutoBuffer bb) { _mem = bb.bufClose(); _start = -1; _len = _mem.length; return this; } @Override NewChunk inflate_impl(NewChunk nc) { nc.set_len(nc.set_sparseLen(0)); final int len = len(); for( int i=0; i<len; i++ ) { int res = 0xFF&_mem[i+OFF]; if( res == _NA ) nc.addNA(); else nc.addNum(res,0); } return nc; } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/fvec/C1NChunk.java
package water.fvec; import water.*; /** * The empty-compression function, if all elements fit directly on UNSIGNED bytes. * [In particular, this is the compression style for data read in from files.] */ public class C1NChunk extends Chunk { static final int OFF=0; C1NChunk(byte[] bs) { _mem=bs; _start = -1; _len = _mem.length; } @Override protected final long at8_impl( int i ) { return 0xFF&_mem[i]; } @Override protected final double atd_impl( int i ) { return 0xFF&_mem[i]; } @Override protected final boolean isNA_impl( int i ) { return false; } @Override boolean set_impl(int i, long l ) { return false; } @Override boolean set_impl(int i, double d) { return false; } @Override boolean set_impl(int i, float f ) { return false; } @Override boolean setNA_impl(int idx) { return false; } @Override boolean hasFloat() { return false; } @Override public AutoBuffer write(AutoBuffer bb) { return bb.putA1(_mem,_mem.length); } @Override public C1NChunk read(AutoBuffer bb) { _mem = bb.bufClose(); _start = -1; _len = _mem.length; return this; } @Override NewChunk inflate_impl(NewChunk nc) { nc.alloc_exponent(len()); nc.alloc_mantissa(len()); for( int i=0; i< len(); i++ ) nc.mantissa()[i] = 0xFF&_mem[i+OFF]; nc.set_len(nc.set_sparseLen(len())); return nc; } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/fvec/C1SChunk.java
package water.fvec; import java.util.Arrays; import water.*;
/**
 * The scale/bias function, where data is in SIGNED bytes before scaling.
 * Stored byte b decodes to (b+_bias)*_scale; C1Chunk._NA (0xFF) marks missing.
 */
public class C1SChunk extends Chunk {
  static final int OFF=8+8;     // _mem layout: 8 bytes scale + 8 bytes bias, then data
  public double _scale;
  long _bias;
  C1SChunk( byte[] bs, long bias, double scale ) {
    _mem=bs; _start = -1; _len = _mem.length-OFF;
    _bias = bias; _scale = scale;
    UDP.set8d(_mem,0,scale);    // Persist scale/bias in the header bytes
    UDP.set8 (_mem,8,bias );
  }
  @Override protected final long at8_impl( int i ) {
    long res = 0xFF&_mem[i+OFF];
    if( res == C1Chunk._NA ) throw new IllegalArgumentException("at8 but value is missing");
    return (long)((res+_bias)*_scale); // Decode, then truncate to long
  }
  @Override protected final double atd_impl( int i ) {
    long res = 0xFF&_mem[i+OFF];
    return (res == C1Chunk._NA)?Double.NaN:(res+_bias)*_scale;
  }
  @Override protected final boolean isNA_impl( int i ) { return (0xFF&_mem[i+OFF]) == C1Chunk._NA; }
  @Override boolean set_impl(int i, long l) {
    long res = (long)(l/_scale)-_bias; // Compressed value
    double d = (res+_bias)*_scale;     // Reverse it
    if( (long)d != l ) return false;   // Does not reverse cleanly?
    if( !(0 <= res && res < 255) ) return false; // Out-o-range for a byte array
    _mem[i+OFF] = (byte)res;
    return true;
  }
  @Override boolean set_impl(int i, double d) { return false; }
  @Override boolean set_impl(int i, float f ) { return false; }
  @Override boolean setNA_impl(int idx) { _mem[idx+OFF] = (byte)C1Chunk._NA; return true; }
  // NOTE(review): the "_scale > Long.MAX_VALUE" arm can only be true for
  // enormous double scales; presumably guards values unrepresentable as long — confirm.
  @Override boolean hasFloat() { return _scale < 1.0 || _scale > Long.MAX_VALUE; }
  @Override public AutoBuffer write(AutoBuffer bb) { return bb.putA1(_mem,_mem.length); }
  @Override public C1SChunk read(AutoBuffer bb) {
    _mem = bb.bufClose(); _start = -1;
    _len = _mem.length-OFF;
    _scale= UDP.get8d(_mem,0);  // Restore header fields
    _bias = UDP.get8 (_mem,8);
    return this;
  }
  @Override NewChunk inflate_impl(NewChunk nc) {
    double dx = Math.log10(_scale); // Decimal exponent of the scale
    assert PrettyPrint.fitsIntoInt(dx);
    nc.set_len(nc.set_sparseLen(0));
    final int len = len();
    for( int i=0; i<len; i++ ) {
      int res = 0xFF&_mem[i+OFF];
      if( res == C1Chunk._NA ) nc.addNA();
      else nc.addNum((res+_bias),(int)dx); // Store as mantissa+exponent
    }
    return nc;
  }
  public int pformat_len0() { return hasFloat() ? pformat_len0(_scale,3) : super.pformat_len0(); }
  public String pformat0() { return hasFloat() ? "% 8.2e" : super.pformat0(); }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/fvec/C2Chunk.java
package water.fvec; import water.*;
/**
 * The empty-compression function, where data is in shorts.
 * Short.MIN_VALUE is reserved as the NA sentinel.
 */
public class C2Chunk extends Chunk {
  static protected final long _NA = Short.MIN_VALUE; // Sentinel marking a missing element
  static final int OFF=0;
  C2Chunk( byte[] bs ) { _mem=bs; _start = -1; _len = _mem.length>>1; }
  @Override protected final long at8_impl( int i ) {
    int res = UDP.get2(_mem,(i<<1)+OFF);
    if( res == _NA ) throw new IllegalArgumentException("at8 but value is missing");
    return res;
  }
  @Override protected final double atd_impl( int i ) {
    int res = UDP.get2(_mem,(i<<1)+OFF);
    return res == _NA?Double.NaN:res;
  }
  @Override protected final boolean isNA_impl( int i ) { return UDP.get2(_mem,(i<<1)+OFF) == _NA; }
  @Override boolean set_impl(int idx, long l) {
    // Strictly greater than MIN_VALUE: that short is the NA sentinel.
    if( !(Short.MIN_VALUE < l && l <= Short.MAX_VALUE) ) return false;
    UDP.set2(_mem,(idx<<1)+OFF,(short)l);
    return true;
  }
  @Override boolean set_impl(int idx, double d) {
    if( Double.isNaN(d) ) return setNA_impl(idx);
    long l = (long)d;           // Only integral doubles can be stored
    return l==d ? set_impl(idx,l) : false;
  }
  @Override boolean set_impl(int i, float f ) { return set_impl(i,(double)f); }
  @Override boolean setNA_impl(int idx) { UDP.set2(_mem,(idx<<1)+OFF,(short)_NA); return true; }
  @Override boolean hasFloat() { return false; }
  @Override public AutoBuffer write(AutoBuffer bb) { return bb.putA1(_mem,_mem.length); }
  @Override public C2Chunk read(AutoBuffer bb) {
    _mem = bb.bufClose(); _start = -1;
    _len = _mem.length>>1;
    assert _mem.length == _len<<1; // Payload must be a whole number of shorts
    return this;
  }
  @Override NewChunk inflate_impl(NewChunk nc) {
    nc.set_len(nc.set_sparseLen(0));
    final int len = len();
    for( int i=0; i<len; i++ ) {
      int res = UDP.get2(_mem,(i<<1)+OFF);
      if( res == _NA ) nc.addNA();
      else nc.addNum(res,0);
    }
    return nc;
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/fvec/C2SChunk.java
package water.fvec; import java.util.Arrays; import water.*;
/**
 * The scale/bias function, where data is in SIGNED shorts before scaling.
 * Stored short s decodes to (s+_bias)*_scale; C2Chunk._NA marks missing.
 */
public class C2SChunk extends Chunk {
  static final int OFF=8+8;     // _mem layout: 8 bytes scale + 8 bytes bias, then data
  public double _scale;
  long _bias;
  C2SChunk( byte[] bs, long bias, double scale ) {
    _mem=bs; _start = -1; _len = (_mem.length-OFF)>>1;
    _bias = bias; _scale = scale;
    UDP.set8d(_mem,0,scale);    // Persist scale/bias in the header bytes
    UDP.set8 (_mem,8,bias );
  }
  @Override protected final long at8_impl( int i ) {
    long res = UDP.get2(_mem,(i<<1)+OFF);
    if( res == C2Chunk._NA ) throw new IllegalArgumentException("at8 but value is missing");
    return (long)((res + _bias)*_scale); // Decode, then truncate to long
  }
  @Override protected final double atd_impl( int i ) {
    long res = UDP.get2(_mem,(i<<1)+OFF);
    return (res == C2Chunk._NA)?Double.NaN:(res + _bias)*_scale;
  }
  @Override protected final boolean isNA_impl( int i ) { return UDP.get2(_mem,(i<<1)+OFF) == C2Chunk._NA; }
  @Override boolean set_impl(int idx, long l) {
    long res = (long)(l/_scale)-_bias; // Compressed value
    double d = (res+_bias)*_scale;     // Reverse it
    if( (long)d != l ) return false;   // Does not reverse cleanly?
    if( !(Short.MIN_VALUE < res && res <= Short.MAX_VALUE) ) return false; // Out-o-range for a short array
    UDP.set2(_mem,(idx<<1)+OFF,(short)res);
    return true;
  }
  @Override boolean set_impl(int i, double d) {
    short s = (short)((d/_scale)-_bias); // Candidate compressed value
    if( s == C2Chunk._NA ) return false; // Would collide with the NA sentinel
    double d2 = (s+_bias)*_scale;
    if( d!=d2 ) return false;            // Does not round-trip exactly
    UDP.set2(_mem,(i<<1)+OFF,s);
    return true;
  }
  @Override boolean set_impl(int i, float f ) { return false; }
  @Override boolean setNA_impl(int idx) { UDP.set2(_mem,(idx<<1)+OFF,(short)C2Chunk._NA); return true; }
  @Override boolean hasFloat() { return _scale < 1.0; } // Fractional scale implies non-integral values
  @Override public AutoBuffer write(AutoBuffer bb) { return bb.putA1(_mem,_mem.length); }
  @Override public C2SChunk read(AutoBuffer bb) {
    _mem = bb.bufClose(); _start = -1;
    _len = (_mem.length-OFF)>>1;
    _scale= UDP.get8d(_mem,0);  // Restore header fields
    _bias = UDP.get8 (_mem,8);
    return this;
  }
  @Override NewChunk inflate_impl(NewChunk nc) {
    double dx = Math.log10(_scale); // Decimal exponent of the scale
    assert PrettyPrint.fitsIntoInt(dx);
    nc.set_len(nc.set_sparseLen(0));
    final int len = len();
    for( int i=0; i<len; i++ ) {
      int res = UDP.get2(_mem,(i<<1)+OFF);
      if( res == C2Chunk._NA ) nc.addNA();
      else nc.addNum((res+_bias),(int)dx); // Store as mantissa+exponent
    }
    return nc;
  }
  public int pformat_len0() { if( _scale==0.01 ) return 5; return hasFloat() ? pformat_len0(_scale,5) : super.pformat_len0(); }
  public String pformat0() { if( _scale==0.01 ) return "%7.2f"; return hasFloat() ? "% 10.4e" : super.pformat0(); }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/fvec/C4Chunk.java
package water.fvec; import water.*;
/**
 * The empty-compression function, where data is in 'int's.
 * Integer.MIN_VALUE is reserved as the NA sentinel.
 */
public class C4Chunk extends Chunk {
  static protected final long _NA = Integer.MIN_VALUE; // Sentinel marking a missing element
  C4Chunk( byte[] bs ) { _mem=bs; _start = -1; _len = _mem.length>>2; }
  @Override protected final long at8_impl( int i ) {
    long res = UDP.get4(_mem,i<<2);
    if( res == _NA ) throw new IllegalArgumentException("at8 but value is missing");
    return res;
  }
  @Override protected final double atd_impl( int i ) {
    long res = UDP.get4(_mem,i<<2);
    return res == _NA?Double.NaN:res;
  }
  @Override protected final boolean isNA_impl( int i ) { return UDP.get4(_mem,i<<2) == _NA; }
  @Override boolean set_impl(int idx, long l) {
    // Strictly greater than MIN_VALUE: that int is the NA sentinel.
    if( !(Integer.MIN_VALUE < l && l <= Integer.MAX_VALUE) ) return false;
    UDP.set4(_mem,idx<<2,(int)l);
    return true;
  }
  @Override boolean set_impl(int i, double d) { return false; }
  @Override boolean set_impl(int i, float f ) { return false; }
  @Override boolean setNA_impl(int idx) { UDP.set4(_mem,(idx<<2),(int)_NA); return true; }
  @Override boolean hasFloat() { return false; }
  @Override public AutoBuffer write(AutoBuffer bb) { return bb.putA1(_mem,_mem.length); }
  @Override public C4Chunk read(AutoBuffer bb) {
    _mem = bb.bufClose(); _start = -1;
    _len = _mem.length>>2;
    assert _mem.length == _len<<2; // Payload must be a whole number of ints
    return this;
  }
  @Override NewChunk inflate_impl(NewChunk nc) {
    nc.set_len(nc.set_sparseLen(0));
    final int len = len();
    for( int i=0; i<len; i++ ) {
      int res = UDP.get4(_mem,(i<<2));
      if( res == _NA ) nc.addNA();
      else nc.addNum(res,0);
    }
    return nc;
  }
}
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/fvec/C4FChunk.java
package water.fvec; import water.*; /** * The empty-compression function, where data is in 'float's. */ public class C4FChunk extends Chunk { C4FChunk( byte[] bs ) { _mem=bs; _start = -1; _len = _mem.length>>2; } @Override protected final long at8_impl( int i ) { float res = UDP.get4f(_mem,i<<2); if( Float.isNaN(res) ) throw new IllegalArgumentException("at8 but value is missing"); return (long)res; } @Override protected final double atd_impl( int i ) { float res = UDP.get4f(_mem,i<<2); return Float.isNaN(res)?Double.NaN:res; } @Override protected final boolean isNA_impl( int i ) { return Float.isNaN(UDP.get4f(_mem,i<<2)); } @Override boolean set_impl(int idx, long l) { return false; } @Override boolean set_impl(int i, double d) { return false; } @Override boolean set_impl(int i, float f ) { UDP.set4f(_mem,i<<2,f); return true; } @Override boolean setNA_impl(int idx) { UDP.set4f(_mem,(idx<<2),Float.NaN); return true; } @Override boolean hasFloat() { return true; } @Override public AutoBuffer write(AutoBuffer bb) { return bb.putA1(_mem,_mem.length); } @Override public C4FChunk read(AutoBuffer bb) { _mem = bb.bufClose(); _start = -1; _len = _mem.length>>2; assert _mem.length == _len<<2; return this; } @Override NewChunk inflate_impl(NewChunk nc) { nc.set_len(nc.set_sparseLen(0)); final int len = len(); for( int i=0; i<len; i++ ) { float res = UDP.get4f(_mem,(i<<2)); if( Float.isNaN(res) ) nc.addNum(Double.NaN); else nc.addNum(res); } return nc; } // 3.3333333e33 public int pformat_len0() { return 14; } public String pformat0() { return "% 13.7e"; } }
0
java-sources/ai/h2o/h2o-classic/2.8/water
java-sources/ai/h2o/h2o-classic/2.8/water/fvec/C4SChunk.java
package water.fvec; import java.util.Arrays; import water.*; /** * The scale/bias function, where data is in SIGNED bytes before scaling. */ public class C4SChunk extends Chunk { static private final long _NA = Integer.MIN_VALUE; static final int OFF=8+8; public double _scale; long _bias; C4SChunk( byte[] bs, long bias, double scale ) { _mem=bs; _start = -1; _len = (_mem.length-OFF)>>2; _bias = bias; _scale = scale; UDP.set8d(_mem,0,scale); UDP.set8(_mem, 8, bias); } @Override protected final long at8_impl( int i ) { long res = UDP.get4(_mem,(i<<2)+OFF); if( res == _NA ) throw new IllegalArgumentException("at8 but value is missing"); return (long)((res + _bias)*_scale); } @Override protected final double atd_impl( int i ) { long res = UDP.get4(_mem,(i<<2)+OFF); return (res == _NA)?Double.NaN:(res + _bias)*_scale; } @Override protected final boolean isNA_impl( int i ) { return UDP.get4(_mem,(i<<2)+OFF) == _NA; } @Override boolean set_impl(int idx, long l) { long res = (long)(l/_scale)-_bias; // Compressed value double d = (res+_bias)*_scale; // Reverse it if( (long)d != l ) return false; // Does not reverse cleanly? 
if( !(Integer.MIN_VALUE < res && res <= Integer.MAX_VALUE) ) return false; // Out-o-range for a int array UDP.set4(_mem,(idx<<2)+OFF,(int)res); return true; } @Override boolean set_impl(int i, double d) { throw H2O.unimpl(); } @Override boolean set_impl(int i, float f ) { return false; } @Override boolean setNA_impl(int idx) { UDP.set4(_mem,(idx<<2)+OFF,(int)_NA); return true; } @Override boolean hasFloat() { return _scale < 1.0; } @Override public AutoBuffer write(AutoBuffer bb) { return bb.putA1(_mem,_mem.length); } @Override public C4SChunk read(AutoBuffer bb) { _mem = bb.bufClose(); _start = -1; _len = (_mem.length-OFF)>>2; _scale= UDP.get8d(_mem,0); _bias = UDP.get8(_mem, 8); return this; } @Override NewChunk inflate_impl(NewChunk nc) { double dx = Math.log10(_scale); assert PrettyPrint.fitsIntoInt(dx); nc.set_len(nc.set_sparseLen(0)); final int len = len(); for( int i=0; i<len; i++ ) { int res = UDP.get4(_mem,(i<<2)+OFF); if( res == _NA ) nc.addNA(); else nc.addNum(res+_bias,(int)dx); } return nc; } public int pformat_len0() { return pformat_len0(_scale,5); } public String pformat0() { return "% 10.4e"; } }