index
int64 | repo_id
string | file_path
string | content
string |
|---|---|---|---|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/SingleThreadedExecution.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* SingleThreadedExecution.java
* Copyright (C) 2016 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Class annotation that can be used to indicate that something should
 * be executed in a non-parallel manner - i.e. only one instance of
 * the class should be executing at any one time in the JVM.
 * <p>
 * Marker annotation only (no members): retained at runtime and applicable
 * to types, so the execution environment can discover it reflectively.
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision: $
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface SingleThreadedExecution {
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/StepInjectorFlowRunner.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* StepInjectorFlowRunner
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow;
import java.util.List;
import weka.core.WekaException;
import weka.knowledgeflow.steps.Step;
/**
 * A flow runner that runs a flow by injecting data into a target step.
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision: $
 */
public class StepInjectorFlowRunner extends FlowRunner {

  /** True if the flow has been reset */
  protected boolean m_reset = true;

  /** True if data is streaming */
  protected boolean m_streaming;

  /**
   * Reset the runner. The next injection will re-initialize the flow.
   */
  public void reset() {
    this.m_reset = true;
    this.m_streaming = false;
  }

  /**
   * Check that the target step can, at present, accept the connection type
   * carried by the supplied data object.
   *
   * @param toInject the data about to be injected
   * @param target the step the data will be injected into
   * @throws WekaException if the target can't accept the connection type
   */
  private void checkConnectionAcceptable(final Data toInject, final Step target) throws WekaException {
    String connName = toInject.getConnectionName();
    List<String> acceptableInputs = target.getIncomingConnectionTypes();
    if (!acceptableInputs.contains(connName)) {
      throw new WekaException("Step '" + target.getName() + "' can't accept a " + connName + " input at present!");
    }
  }

  /**
   * Inject data into the flow.
   *
   * @param toInject the data to inject (must be a batch connection type)
   * @param callback a {@code ExecutionFinishedCallback} to notify when
   *          execution completes
   * @param target the target {@code Step} to inject to
   * @throws WekaException if a problem occurs
   */
  public void injectWithExecutionFinishedCallback(final Data toInject, final ExecutionFinishedCallback callback, final Step target) throws WekaException {
    if (StepManagerImpl.connectionIsIncremental(toInject)) {
      throw new WekaException("Only batch data can be injected via this method.");
    }
    this.addExecutionFinishedCallback(callback);
    this.checkConnectionAcceptable(toInject, target);
    this.initializeFlow();
    // hand the injection off to the execution environment as a task
    this.m_execEnv.submitTask(new StepTask<Void>(null) {
      /** For serialization */
      private static final long serialVersionUID = 663985401825979869L;

      @Override
      public void process() throws Exception {
        target.processIncoming(toInject);
      }
    });
    this.m_logHandler.logDebug("StepInjectorFlowRunner: Launching shutdown monitor");
    this.launchExecutorShutdownThread();
  }

  /**
   * Find a step in the flow.
   *
   * @param stepName the name of the Step to find
   * @param stepClass the class of the step to find
   * @return the named step
   * @throws WekaException if the named step is not in the flow, the found
   *           step is not of the supplied class, or it cannot accept any
   *           incoming data
   */
  public Step findStep(final String stepName, final Class stepClass) throws WekaException {
    if (this.m_flow == null) {
      throw new WekaException("No flow set!");
    }
    StepManagerImpl manager = this.m_flow.findStep(stepName);
    if (manager == null) {
      throw new WekaException("Step '" + stepName + "' does not seem " + "to be part of the flow!");
    }
    Step target = manager.getManagedStep();
    // exact class match required - subclasses of stepClass are rejected
    if (target.getClass() != stepClass) {
      throw new WekaException("Step '" + stepName + "' is not an instance of " + stepClass.getCanonicalName());
    }
    List<String> incoming = target.getIncomingConnectionTypes();
    if (incoming == null || incoming.isEmpty()) {
      throw new WekaException("Step '" + stepName + "' cannot process any incoming data!");
    }
    return target;
  }

  /**
   * Inject streaming data into the target step in the flow.
   *
   * @param toInject a streaming {@code Data} object to inject
   * @param target the target step to inject to
   * @param lastData true if this is the last piece of data in the stream
   * @throws WekaException if a problem occurs
   * @throws InterruptedException if interrupted while initializing the flow
   */
  public void injectStreaming(final Data toInject, final Step target, final boolean lastData) throws WekaException, InterruptedException {
    if (this.m_reset) {
      if (this.m_streaming) {
        // shut down the executor service left over from a previous stream
        this.m_execEnv.stopClientExecutionService();
      }
      this.checkConnectionAcceptable(toInject, target);
      this.initializeFlow();
      toInject.setPayloadElement(StepManager.CON_AUX_DATA_INCREMENTAL_STREAM_END, false);
      this.m_streaming = true;
      this.m_reset = false;
    }
    if (lastData) {
      // mark this Data object as the end of the stream for downstream steps
      toInject.setPayloadElement(StepManager.CON_AUX_DATA_INCREMENTAL_STREAM_END, true);
    }
    target.processIncoming(toInject);
    if (lastData) {
      this.m_logHandler.logDebug("StepInjectorFlowRunner: Shutting down executor service");
      this.m_execEnv.stopClientExecutionService();
      this.reset();
    }
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/StepManager.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* StepManager.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow;
import weka.core.Instances;
import weka.core.Settings;
import weka.core.WekaException;
import weka.gui.Logger;
import weka.knowledgeflow.steps.Step;
import java.util.List;
import java.util.Map;
/**
 * Client public interface for the StepManager. Step implementations should only
 * use this interface
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 */
public interface StepManager {
// standard connection types
public static final String CON_DATASET = "dataSet";
public static final String CON_INSTANCE = "instance";
public static final String CON_TRAININGSET = "trainingSet";
public static final String CON_TESTSET = "testSet";
public static final String CON_BATCH_CLASSIFIER = "batchClassifier";
public static final String CON_INCREMENTAL_CLASSIFIER =
"incrementalClassifier";
public static final String CON_INCREMENTAL_CLUSTERER = "incrementalClusterer";
public static final String CON_BATCH_CLUSTERER = "batchClusterer";
public static final String CON_BATCH_ASSOCIATOR = "batchAssociator";
public static final String CON_VISUALIZABLE_ERROR = "visualizableError";
public static final String CON_THRESHOLD_DATA = "thresholdData";
public static final String CON_TEXT = "text";
public static final String CON_IMAGE = "image";
public static final String CON_GRAPH = "graph";
public static final String CON_CHART = "chart";
public static final String CON_INFO = "info";
public static final String CON_ENVIRONMENT = "environment";
public static final String CON_JOB_SUCCESS = "jobSuccess";
public static final String CON_JOB_FAILURE = "jobFailure";
// auxiliary information for various connections
public static final String CON_AUX_DATA_SET_NUM = "aux_set_num";
public static final String CON_AUX_DATA_MAX_SET_NUM = "aux_max_set_num";
public static final String CON_AUX_DATA_TEST_INSTANCE = "aux_testInstance";
// NOTE(review): the value "aux_testsSet" looks like a typo ("aux_testSet"?)
// but must be kept as-is for wire/payload compatibility
public static final String CON_AUX_DATA_TESTSET = "aux_testsSet";
public static final String CON_AUX_DATA_TRAININGSET = "aux_trainingSet";
public static final String CON_AUX_DATA_INSTANCE = "aux_instance";
public static final String CON_AUX_DATA_TEXT_TITLE = "aux_textTitle";
public static final String CON_AUX_DATA_LABEL = "aux_label";
public static final String CON_AUX_DATA_CLASS_ATTRIBUTE = "class_attribute";
public static final String CON_AUX_DATA_GRAPH_TITLE = "graph_title";
public static final String CON_AUX_DATA_GRAPH_TYPE = "graph_type";
public static final String CON_AUX_DATA_CHART_MAX = "chart_max";
public static final String CON_AUX_DATA_CHART_MIN = "chart_min";
public static final String CON_AUX_DATA_CHART_DATA_POINT = "chart_data_point";
public static final String CON_AUX_DATA_CHART_LEGEND = "chart_legend";
public static final String CON_AUX_DATA_ENVIRONMENT_VARIABLES = "env_variables";
public static final String CON_AUX_DATA_ENVIRONMENT_PROPERTIES = "env_properties";
public static final String CON_AUX_DATA_ENVIRONMENT_RESULTS = "env_results";
public static final String CON_AUX_DATA_BATCH_ASSOCIATION_RULES =
"batch_association_rules";
public static final String CON_AUX_DATA_INCREMENTAL_STREAM_END =
"incremental_stream_end";
public static final String CON_AUX_DATA_IS_INCREMENTAL = "incremental_stream";
/**
 * Get the name of the step managed by this StepManager
 *
 * @return the name of the managed step
 */
String getName();
/**
 * Get the actual step managed by this step manager
 *
 * @return the Step managed by this step manager
 */
Step getManagedStep();
/**
 * Get the executing environment. This contains information such as whether
 * the flow is running in headless environment, what environment variables are
 * available and methods to execute units of work in parallel.
 *
 * @return the execution environment
 */
ExecutionEnvironment getExecutionEnvironment();
/**
 * Get the knowledge flow settings
 *
 * @return the knowledge flow settings
 */
Settings getSettings();
/**
 * Get the number of steps that are connected with incoming connections
 *
 * @return the number of incoming connections
 */
int numIncomingConnections();
/**
 * Get the number of steps that are connected with outgoing connections
 *
 * @return the number of outgoing connections
 */
int numOutgoingConnections();
/**
 * Get the number of steps that are connected with the given incoming
 * connection type
 *
 * @param connectionName the type of the incoming connection
 * @return the number of steps connected with the specified incoming
 *         connection type
 */
int numIncomingConnectionsOfType(String connectionName);
/**
 * Get the number of steps that are connected with the given outgoing
 * connection type
 *
 * @param connectionName the type of the outgoing connection
 * @return the number of steps connected with the specified outgoing
 *         connection type
 */
int numOutgoingConnectionsOfType(String connectionName);
/**
 * Get a list of steps that are the source of incoming connections of the
 * given type
 *
 * @param connectionName the name of the incoming connection to get a list of
 *          steps for
 * @return a list of steps that are the source of incoming connections of the
 *         given type
 */
List<StepManager> getIncomingConnectedStepsOfConnectionType(
String connectionName);
/**
 * Get the named step that is connected with an incoming connection.
 *
 * @param stepName the name of the step to get
 * @return the step connected with an incoming connection or null if the named
 *         step is not connected
 */
StepManager getIncomingConnectedStepWithName(String stepName);
/**
 * Get a named step connected to this step with an outgoing connection
 *
 * @param stepName the name of the step to look for
 * @return the connected step
 */
StepManager getOutgoingConnectedStepWithName(String stepName);
/**
 * Get a list of downstream steps connected to this step with the given
 * connection type.
 *
 * @param connectionName the name of the outgoing connection
 * @return a list of downstream steps connected to this one with the named
 *         connection type
 */
List<StepManager> getOutgoingConnectedStepsOfConnectionType(
String connectionName);
/**
 * Get a Map of all incoming connections. Map is keyed by connection type;
 * values are lists of steps
 *
 * @return a Map of incoming connections
 */
Map<String, List<StepManager>> getIncomingConnections();
/**
 * Get a Map of all outgoing connections. Map is keyed by connection type;
 * values are lists of steps
 *
 * @return a Map of outgoing connections
 */
Map<String, List<StepManager>> getOutgoingConnections();
/**
 * Output data to all steps connected with the supplied outgoing connection
 * type. Populates the source and connection name in the supplied Data object
 * for the client
 *
 * @param outgoingConnectionName the type of the outgoing connection to send
 *          data to
 * @param data a single Data object to send
 * @throws WekaException if a problem occurs
 */
void outputData(String outgoingConnectionName, Data data)
throws WekaException;
/**
 * Output one or more Data objects to all relevant steps. Populates the source
 * in each Data object for the client, HOWEVER, the client must have populated
 * the connection type in each Data object to be output so that the
 * StepManager knows which connected steps to send the data to. Also notifies
 * any registered {@code StepOutputListeners}. Note that the downstream
 * step(s)' processIncoming() method is called in a separate thread for batch
 * connections. Furthermore, if multiple Data objects are supplied via the
 * varargs argument, and a target step will receive more than one of the Data
 * objects, then they will be passed on to the step in question sequentially
 * within the same thread of execution.
 *
 * @param data one or more Data objects to be sent
 * @throws WekaException if a problem occurs
 */
void outputData(Data... data) throws WekaException;
/**
 * Output a single Data object to the named step with the supplied outgoing
 * connection type
 *
 * @param outgoingConnectionName the name of the outgoing connection
 * @param stepName the name of the step to send the data to
 * @param data the data to send
 * @throws WekaException if a problem occurs
 */
void outputData(String outgoingConnectionName, String stepName, Data data)
throws WekaException;
/**
 * Attempt to retrieve the structure (as a header-only set of instances) for
 * the named incoming connection type. Assumes that there is only one step
 * connected with the supplied incoming connection type.
 *
 * @param connectionName the type of the incoming connection to get the
 *          structure for
 * @return the structure of the data for the specified incoming connection, or
 *         null if the structure can't be determined (or represented as an
 *         Instances object)
 * @throws WekaException if a problem occurs
 */
Instances getIncomingStructureForConnectionType(String connectionName)
throws WekaException;
/**
 * Attempt to get the incoming structure (as a header-only set of instances)
 * from the given managed step for the given connection type.
 *
 * @param sourceStep the step manager managing the source step
 * @param connectionName the name of the connection to attempt to get the
 *          structure for
 * @return the structure as a header-only set of instances, or null if the
 *         source step can't determine this at present or if it can't be
 *         represented as a set of instances.
 * @throws WekaException if a problem occurs
 */
Instances getIncomingStructureFromStep(StepManager sourceStep,
String connectionName) throws WekaException;
/**
 * Returns true if, at this time, the step managed by this step manager is
 * currently busy with processing
 *
 * @return true if the step managed by this step manager is busy
 */
boolean isStepBusy();
/**
 * Return true if a stop has been requested by the runtime environment
 *
 * @return true if a stop has been requested
 */
boolean isStopRequested();
/**
 * Return true if the current step is finished.
 *
 * @return true if the current step is finished
 */
boolean isStepFinished();
/**
 * Step implementations processing batch data should call this to indicate
 * that they have started some processing. Calling this should set the busy
 * flag to true.
 */
void processing();
/**
 * Step implementations processing batch data should call this to indicate
 * that they have finished all processing. Calling this should set the busy
 * flag to false.
 */
void finished();
/**
 * Step implementations processing batch data should call this as soon as they
 * have finished processing after a stop has been requested. Calling this
 * should set the busy flag to false.
 */
void interrupted();
/**
 * Returns true if this data object marks the end of an incremental stream.
 * Note - does not check that the data object is actually an incremental one
 * of some sort! Just checks to see if the CON_AUX_DATA_INCREMENTAL_STREAM_END
 * flag is set to true or not.
 *
 * @param data the data element to check
 * @return true if the data element is flagged as end of stream
 */
boolean isStreamFinished(Data data);
/**
 * Start a throughput measurement. Should only be used by steps that are
 * processing instance streams. Call just before performing a unit of work for
 * an incoming instance.
 */
void throughputUpdateStart();
/**
 * End a throughput measurement. Should only be used by steps that are
 * processing instance streams. Call just after finishing a unit of work for
 * an incoming instance
 */
void throughputUpdateEnd();
/**
 * Signal that throughput measurement has finished. Should only be used by
 * steps that are emitting incremental data. Call at the completion of a
 * data stream.
 *
 * @param data one or more Data events (with appropriate connection type set)
 *          to pass on to downstream connected steps. These are used to carry
 *          any final data and to inform the downstream step(s) that the
 *          stream has ended
 * @throws WekaException if a problem occurs
 */
void throughputFinished(Data... data) throws WekaException;
/**
 * Log a message at the "low" level
 *
 * @param message the message to log
 */
void logLow(String message);
/**
 * Log a message at the "basic" level
 *
 * @param message the message to log
 */
void logBasic(String message);
/**
 * Log a message at the "detailed" level
 *
 * @param message the message to log
 */
void logDetailed(String message);
/**
 * Log a message at the "debug" level
 *
 * @param message the message to log
 */
void logDebug(String message);
/**
 * Log a warning message. Always makes it into the log regardless of what
 * logging level the user has specified.
 *
 * @param message the message to log
 */
void logWarning(String message);
/**
 * Log an error message. Always makes it into the log regardless of what
 * logging level the user has specified. Causes all flow execution to halt.
 * Prints an exception to the log if supplied.
 *
 * @param message the message to log
 * @param cause the optional Throwable to log
 */
void logError(String message, Throwable cause);
/**
 * Write a message to the log at the given logging level
 *
 * @param message the message to write
 * @param level the level for the message
 */
void log(String message, LoggingLevel level);
/**
 * Write a status message
 *
 * @param message the message
 */
void statusMessage(String message);
/**
 * Get the log
 *
 * @return the log object
 */
Logger getLog();
/**
 * Get the currently set logging level
 *
 * @return the currently set logging level
 */
LoggingLevel getLoggingLevel();
/**
 * Substitute all known environment variables in the given string
 *
 * @param source the source string
 * @return the source string with all known variables resolved
 */
String environmentSubstitute(String source);
/**
 * Returns a reference to the step being managed if it has one or more
 * outgoing CON_INFO connections and the managed step is of the supplied class
 *
 * @param stepClass the expected class of the step
 * @return the step being managed if outgoing CON_INFO connections are present
 *         and the step is of the supplied class
 * @throws WekaException if there are no outgoing CON_INFO connections or the
 *           managed step is the wrong type
 */
Step getInfoStep(Class stepClass) throws WekaException;
/**
 * Returns a reference to the step being managed if it has one or more
 * outgoing CON_INFO connections.
 *
 * @return the step being managed if outgoing CON_INFO connections are present
 * @throws WekaException if there are no outgoing CON_INFO connections
 */
Step getInfoStep() throws WekaException;
/**
 * Finds a named step in the current flow. Returns null if the named step is
 * not present in the flow
 *
 * @param stepNameToFind the name of the step to find
 * @return the StepManager of the named step, or null if the step does not
 *         exist in the current flow.
 */
StepManager findStepInFlow(String stepNameToFind);
/**
 * Returns true if the step managed by this step manager has been marked as
 * being resource (cpu/memory) intensive.
 *
 * @return true if the managed step is resource intensive
 */
boolean stepIsResourceIntensive();
/**
 * Mark the step managed by this step manager as resource intensive
 *
 * @param isResourceIntensive true if the step managed by this step manager is
 *          resource intensive
 */
void setStepIsResourceIntensive(boolean isResourceIntensive);
/**
 * Mark the step managed by this step manager as one that must run
 * single-threaded. I.e. in an executor service with one worker thread, thus
 * effectively preventing more than one copy of the step from executing at any
 * one point in time
 *
 * @param mustRunSingleThreaded true if the managed step must run
 *          single-threaded
 */
void setStepMustRunSingleThreaded(boolean mustRunSingleThreaded);
/**
 * Returns true if the step managed by this step manager has been marked as
 * one that must run single-threaded. I.e. in an executor service with one
 * worker thread, thus effectively preventing more than one copy of the step
 * from executing at any one point in time
 *
 * @return true if the managed step must run single-threaded
 */
boolean getStepMustRunSingleThreaded();
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/StepManagerImpl.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* StepManagerImpl.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow;
import weka.core.Environment;
import weka.core.Instances;
import weka.core.OptionHandler;
import weka.core.Settings;
import weka.core.Utils;
import weka.core.WekaException;
import weka.gui.Logger;
import weka.gui.beans.StreamThroughput;
import weka.gui.knowledgeflow.StepVisual;
import weka.knowledgeflow.steps.KFStep;
import weka.knowledgeflow.steps.Step;
import weka.knowledgeflow.steps.WekaAlgorithmWrapper;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
 * Concrete implementation of the StepManager interface. Has a number of
 * methods, beyond those aimed at Step implementations, that are useful for
 * applications that manipulate Steps and their connections.
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision: $
 */
public class StepManagerImpl implements StepManager {
/** The step being managed by this step manager */
protected Step m_managedStep;
/**
 * True if the runtime environment has requested that the managed step stop
 * processing
 */
protected boolean m_stopRequested;
/** True if, at the current time, the managed step is busy with processing */
protected boolean m_stepIsBusy;
/** True if the step is finished with processing (as far as it can tell) */
protected boolean m_stepIsFinished;
/**
 * Set and get arbitrary properties relating to this step/step manager. E.g. a
 * plugin execution environment might allow a step to be marked as execute
 * remotely or locally
 */
protected Map<String, Object> m_stepProperties =
new HashMap<String, Object>();
/**
 * Holds the name of the class of the editor for the managed step. If
 * null/empty then the environment will dynamically generate an editor using
 * the GenericObjectEditor
 */
protected String m_managedStepEditor;
/** Map of incoming connections, keyed by connection name */
protected Map<String, List<StepManager>> m_connectedByTypeIncoming =
new LinkedHashMap<String, List<StepManager>>();
/** Map of outgoing connections, keyed by connection name */
protected Map<String, List<StepManager>> m_connectedByTypeOutgoing =
new LinkedHashMap<String, List<StepManager>>();
/** Non-step parties interested in outgoing data */
protected Map<String, List<StepOutputListener>> m_outputListeners =
new LinkedHashMap<String, List<StepOutputListener>>();
/**
 * The StepVisual for this step (non-null if existing in a GUI environment)
 */
protected StepVisual m_stepVisual;
/**
 * Temporary holder for the x axis visual location of this step. Populated
 * when a flow is loaded if coordinates are present in the step's JSON
 * representation. If running in an interactive graphical environment then a
 * StepVisual will be created and initialized with these values
 */
protected int m_x = -1;
/**
 * Temporary holder for the y axis visual location of this step. Populated
 * when a flow is loaded if coordinates are present in the step's JSON
 * representation. If running in an interactive graphical environment then a
 * StepVisual will be created and initialized with these values
 */
protected int m_y = -1;
/**
 * Holds the executing environment. Will be able to query to see if
 * environment is headless. Will be able to request a stop of the entire flow.
 */
protected BaseExecutionEnvironment m_executionEnvironment;
/** The log to use */
protected LogManager m_log;
/** For measuring performance of instance streams */
protected transient StreamThroughput m_throughput;
/**
 * Used when interrogating the managed step for what output connections it can
 * produce at present given the incoming connections to the step. Normally, a
 * step decides what it can produce on the basis of what physical incoming
 * connections are present, regardless of whether the connection may or may
 * not produce data (e.g. if there is a broken link in the graph further
 * upstream). When this flag is true, the routine adjusts the number of
 * incoming connections of a given type to account for broken upstream links.
 * This is primarily used by the graphical UI in order to change connections
 * from red to grey when rendering.
 */
protected boolean m_adjustForGraphicalRendering;
/** True if the managed step is a resource (cpu/memory) intensive step */
protected boolean m_stepIsResourceIntensive;
/**
 * True if the managed step must run single threaded - i.e. in an executor
 * service with one worker thread
 */
protected boolean m_stepMustRunSingleThreaded;
/**
 * Constructor
 *
 * @param step the Step to manage
 */
public StepManagerImpl(Step step) {
// NOTE(review): setManagedStep() is public (overridable) and is called from
// the constructor; a subclass overriding it would observe a partially
// constructed instance. Kept as-is to preserve existing behavior.
setManagedStep(step);
}
/**
 * Returns the name of the Step wrapped by this manager.
 *
 * @return the name of the managed step
 */
@Override
public String getName() {
  return this.m_managedStep.getName();
}
/**
 * Accessor for the Step instance wrapped by this manager.
 *
 * @return the step managed by this manager
 */
@Override
public Step getManagedStep() {
  return this.m_managedStep;
}
/**
 * Set the step managed by this manager. Hooks the step up to this manager,
 * records any custom editor class for it, and caches scheduling hints from
 * the step's class-level annotations ({@code KFStep.resourceIntensive()} and
 * the presence of {@code SingleThreadedExecution}).
 *
 * @param step the step to manage
 */
public void setManagedStep(Step step) {
  m_managedStep = step;
  step.setStepManager(this);
  setManagedStepEditorClass(step.getCustomEditorForStep());
  // getAnnotation() already returns the concrete annotation type, so the
  // intermediate Annotation variable and cast in the original were redundant
  KFStep kfAnnotation = step.getClass().getAnnotation(KFStep.class);
  m_stepIsResourceIntensive =
    kfAnnotation != null && kfAnnotation.resourceIntensive();
  m_stepMustRunSingleThreaded =
    step.getClass().getAnnotation(SingleThreadedExecution.class) != null;
}
/**
 * Flag the managed step as resource (cpu/memory) intensive or not.
 *
 * @param resourceIntensive true if the managed step is resource intensive
 */
@Override
public void setStepIsResourceIntensive(boolean resourceIntensive) {
  this.m_stepIsResourceIntensive = resourceIntensive;
}
/**
 * Query whether the managed step has been flagged as resource (cpu/memory)
 * intensive.
 *
 * @return true if the step is resource intensive
 */
@Override
public boolean stepIsResourceIntensive() {
  return this.m_stepIsResourceIntensive;
}
/**
 * Flag the managed step as one that must run single-threaded - i.e. in an
 * executor service with one worker thread, so that at most one copy of the
 * step executes at any point in time.
 *
 * @param mustRunSingleThreaded true if the managed step must run
 *          single-threaded
 */
@Override
public void setStepMustRunSingleThreaded(boolean mustRunSingleThreaded) {
  this.m_stepMustRunSingleThreaded = mustRunSingleThreaded;
}
/**
 * Query whether the managed step must run single-threaded - i.e. in an
 * executor service with one worker thread, so that at most one copy of the
 * step executes at any point in time.
 *
 * @return true if the managed step must run single-threaded
 */
@Override
public boolean getStepMustRunSingleThreaded() {
  return this.m_stepMustRunSingleThreaded;
}
/**
 * Accessor for the step visual (non-null only when running in a graphical
 * environment).
 *
 * @return the step visual in use
 */
public StepVisual getStepVisual() {
  return this.m_stepVisual;
}
/**
 * Set the step visual to use when running in a graphical environment. If
 * visual coordinates were previously loaded from the flow's JSON
 * representation, they are pushed into the supplied visual.
 *
 * @param visual the step visual to use
 */
public void setStepVisual(StepVisual visual) {
  this.m_stepVisual = visual;
  boolean haveStoredCoordinates = this.m_x != -1 && this.m_y != -1;
  if (haveStoredCoordinates) {
    this.m_stepVisual.setX(this.m_x);
    this.m_stepVisual.setY(this.m_y);
  }
}
/**
 * Store an arbitrary named property for this step.
 *
 * @param name the name of the property
 * @param value the value of the property
 */
public void setStepProperty(String name, Object value) {
  this.m_stepProperties.put(name, value);
}
/**
 * Look up a named property for this step.
 *
 * @param name the name of the property to get
 * @return the value of the property or null if the property is not set
 */
public Object getStepProperty(String name) {
  return this.m_stepProperties.get(name);
}
/**
 * Get the fully qualified name of an editor component for graphically
 * configuring the step. When null/empty the environment dynamically
 * generates an editor using the GenericObjectEditor.
 *
 * @return the editor class to use
 */
protected String getManagedStepEditorClass() {
  return this.m_managedStepEditor;
}
/**
 * Set the fully qualified name of a custom editor component for graphically
 * configuring the managed step. When not supplied, the environment falls back
 * to a dynamically generated GenericObjectEditor-based editor.
 *
 * @param editor the editor class name to use
 */
protected void setManagedStepEditorClass(String editor) {
  m_managedStepEditor = editor;
}
/**
 * Get the execution environment that the managed step is running within.
 *
 * @return the execution environment (may be null if none has been set yet)
 */
@Override
public ExecutionEnvironment getExecutionEnvironment() {
  return m_executionEnvironment;
}
/**
 * Get the current knowledge flow settings, as held by the execution
 * environment.
 *
 * @return the current knowledge flow settings
 * @throws IllegalStateException if no execution environment is available
 */
@Override
public Settings getSettings() {
  ExecutionEnvironment env = getExecutionEnvironment();
  if (env == null) {
    throw new IllegalStateException("There is no execution environment "
      + "available!");
  }
  return env.getSettings();
}
/**
 * Set the execution environment that the managed step will run within. Also
 * adopts the environment's log and logging level.
 *
 * @param env the execution environment; must be a BaseExecutionEnvironment
 *          (or a subclass thereof)
 * @throws WekaException if the supplied environment is not a
 *           BaseExecutionEnvironment
 */
protected void setExecutionEnvironment(ExecutionEnvironment env)
  throws WekaException {
  if (!(env instanceof BaseExecutionEnvironment)) {
    throw new WekaException(
      "Execution environments need to be BaseExecutionEnvironment "
        + "(or subclass thereof)");
  }
  m_executionEnvironment = (BaseExecutionEnvironment) env;
  // inherit logging configuration from the environment
  setLog(m_executionEnvironment.getLog());
  setLoggingLevel(m_executionEnvironment.getLoggingLevel());
}
/**
 * Get the logging level currently in effect. Defaults to BASIC when no log
 * manager has been configured yet.
 *
 * @return the logging level in use
 */
@Override
public LoggingLevel getLoggingLevel() {
  if (m_log == null) {
    return LoggingLevel.BASIC;
  }
  return m_log.getLoggingLevel();
}
/**
 * Set the logging level to use. Lazily creates the log manager for the
 * managed step if one does not exist yet.
 *
 * @param newLevel the level to use
 */
public void setLoggingLevel(LoggingLevel newLevel) {
  if (m_log == null) {
    m_log = new LogManager(getManagedStep());
  }
  m_log.setLoggingLevel(newLevel);
}
/**
 * Get the log in use.
 *
 * @return the log, or null if no log has been configured
 */
@Override
public Logger getLog() {
  if (m_log == null) {
    return null;
  }
  return m_log.getLog();
}
/**
 * Set the log to use. Always creates a fresh log manager wrapping the managed
 * step before installing the supplied log.
 *
 * @param log the log to use
 */
public void setLog(Logger log) {
  m_log = new LogManager(getManagedStep());
  m_log.setLog(log);
}
/**
 * Initialize the step being managed. Resets the busy/stop/finished flags and
 * clears any throughput measurement state before invoking the step's own
 * {@code stepInit()}.
 *
 * @return true if the initialization was successful; false if
 *         {@code stepInit()} threw (the error is logged via
 *         {@code logError()})
 */
protected boolean initStep() {
  boolean initializedOK = false;
  m_stepIsBusy = false;
  m_stopRequested = false;
  m_stepIsFinished = false;
  try {
    getManagedStep().stepInit();
    initializedOK = true;
  } catch (Throwable ex) {
    // covers WekaException and anything else thrown by stepInit(); both were
    // previously handled identically in separate catch blocks
    logError(ex.getMessage(), ex);
  }
  m_throughput = null;
  return initializedOK;
}
/**
 * Report whether the managed step is currently busy with processing.
 *
 * @return true if the managed step is busy
 */
@Override
public boolean isStepBusy() {
  return m_stepIsBusy;
}
/**
 * Report whether the runtime environment has requested that processing be
 * stopped.
 *
 * @return true if a stop has been requested
 */
@Override
public boolean isStopRequested() {
  return m_stopRequested;
}
/**
 * Report whether the managed step has finished all of its processing.
 *
 * @return true if the managed step is finished
 */
@Override
public boolean isStepFinished() {
  return m_stepIsFinished;
}
/**
 * Set or clear the stop-requested flag.
 *
 * @param stopRequested true if a stop has been requested
 */
public void setStopRequested(boolean stopRequested) {
  m_stopRequested = stopRequested;
}
/**
 * Signal that the managed step has started processing; raises the busy flag.
 */
@Override
public void processing() {
  m_stepIsBusy = true;
}
/**
 * Signal that the managed step has finished all processing. Lowers the busy
 * flag, raises the finished flag, and (unless a stop was requested) writes a
 * "Finished." message to the status area of the log.
 */
@Override
public void finished() {
  m_stepIsBusy = false;
  m_stepIsFinished = true;
  if (!isStopRequested()) {
    statusMessage("Finished.");
  }
}
/**
 * Signal that processing ended because a stop was requested; simply lowers
 * the busy flag.
 */
@Override
public void interrupted() {
  m_stepIsBusy = false;
}
/**
 * Check whether the supplied data object marks the end of an incremental
 * stream. Note that this does not verify that the data object is actually an
 * incremental one - it only inspects the
 * CON_AUX_DATA_INCREMENTAL_STREAM_END payload flag (defaulting to false when
 * absent).
 *
 * @param data the data element to check
 * @return true if the data element is flagged as end of stream
 */
@Override
public boolean isStreamFinished(Data data) {
  return data.getPayloadElement(CON_AUX_DATA_INCREMENTAL_STREAM_END, false);
}
/**
 * Record a start point for streaming throughput measurement. Lazily creates
 * the throughput tracker on first use and raises the busy flag.
 */
@Override
public void throughputUpdateStart() {
  if (m_throughput == null) {
    // first measurement for this run - create the tracker keyed by this
    // step's unique status-message prefix
    m_throughput = new StreamThroughput(stepStatusMessagePrefix());
  }
  processing();
  m_throughput.updateStart();
}
/**
 * Clients can use this to record a stop point for streaming throughput
 * measuring. Does nothing unless throughputUpdateStart() has been called at
 * least once (which creates the tracker). If a stop has been requested, also
 * marks the step as finished.
 */
@Override
public void throughputUpdateEnd() {
  if (m_throughput != null) {
    // NOTE(review): assumes m_log is non-null here; throughputUpdateStart()
    // does not create it, so a step measuring throughput without a log set
    // would NPE - confirm that the environment always installs a log first
    m_throughput.updateEnd(m_log.getLog());
    if (isStopRequested()) {
      finished();
    }
  }
}
/**
 * Clients can use this to indicate that throughput measuring is finished
 * (i.e. the stream being processed has ended). Final throughput information
 * is printed to the log and status
 *
 * @param data one or more Data events (with appropriate connection type set)
 *          to pass on to downstream connected steps. These are used to carry
 *          any final data and to inform the downstream step(s) that the
 *          stream has ended
 * @throws WekaException if a problem occurs
 */
@Override
public void throughputFinished(Data... data) throws WekaException {
  // mark finished first so downstream dispatch sees a consistent state
  finished();
  if (data.length > 0) {
    // flag every outgoing payload as end-of-stream before sending
    for (Data d : data) {
      d.setPayloadElement(CON_AUX_DATA_INCREMENTAL_STREAM_END, true);
    }
    outputData(data);
  }
  if (m_throughput != null) {
    // NOTE(review): assumes m_log is non-null when throughput tracking is
    // active - confirm a log is always installed before streaming starts
    m_throughput.finished(m_log.getLog());
  }
  // not actually interrupted - we just abuse this method in order to
  // set the busy flag to false
  interrupted();
}
/**
 * Remove (at most) the first entry in the supplied connection list whose
 * managed step is the given step instance. Identity comparison is used.
 *
 * @param connList the list of connected step managers to search
 * @param toDisconnect the step whose manager should be removed
 */
private void disconnectStep(List<StepManager> connList, Step toDisconnect) {
  for (Iterator<StepManager> it = connList.iterator(); it.hasNext();) {
    StepManagerImpl candidate = (StepManagerImpl) it.next();
    if (candidate.getManagedStep() == toDisconnect) {
      it.remove();
      break;
    }
  }
}
/**
 * Disconnect the supplied step, under the given connection type, from both
 * the incoming and outgoing connections of the step managed by this manager.
 * Does nothing if this step has no connections of the given type to the
 * supplied step. Connection-type entries that become empty are removed from
 * the respective maps.
 *
 * @param toDisconnect the step to disconnect
 * @param connType the connection type to disconnect
 */
public void disconnectStepWithConnection(Step toDisconnect, String connType) {
  // incoming side
  List<StepManager> incoming = m_connectedByTypeIncoming.get(connType);
  if (incoming != null) {
    disconnectStep(incoming, toDisconnect);
    if (incoming.isEmpty()) {
      m_connectedByTypeIncoming.remove(connType);
    }
  }
  // outgoing side
  List<StepManager> outgoing = m_connectedByTypeOutgoing.get(connType);
  if (outgoing != null) {
    disconnectStep(outgoing, toDisconnect);
    if (outgoing.isEmpty()) {
      m_connectedByTypeOutgoing.remove(connType);
    }
  }
}
/**
 * Remove the supplied step from all connections (incoming and outgoing, of
 * every type) of the step managed by this manager. Does nothing if this step
 * has no connections to the supplied step. Connection-type entries that
 * become empty are removed from the respective maps.
 *
 * @param toDisconnect the step to disconnect
 */
public void disconnectStep(Step toDisconnect) {
  List<String> nowEmpty = new ArrayList<String>();
  // incoming side
  for (Map.Entry<String, List<StepManager>> e : m_connectedByTypeIncoming
    .entrySet()) {
    List<StepManager> connected = e.getValue();
    disconnectStep(connected, toDisconnect);
    if (connected.isEmpty()) {
      nowEmpty.add(e.getKey());
    }
  }
  for (String connType : nowEmpty) {
    m_connectedByTypeIncoming.remove(connType);
  }
  nowEmpty.clear();
  // outgoing side
  for (Map.Entry<String, List<StepManager>> e : m_connectedByTypeOutgoing
    .entrySet()) {
    List<StepManager> connected = e.getValue();
    disconnectStep(connected, toDisconnect);
    if (connected.isEmpty()) {
      nowEmpty.add(e.getKey());
    }
  }
  for (String connType : nowEmpty) {
    m_connectedByTypeOutgoing.remove(connType);
  }
}
/**
 * Clear all incoming and outgoing connections recorded for the step managed
 * by this manager.
 */
public void clearAllConnections() {
  m_connectedByTypeIncoming.clear();
  m_connectedByTypeOutgoing.clear();
}
/**
 * Register an incoming connection of the given type from the supplied source
 * step. Creates the per-type list on first use.
 *
 * @param connectionName the name of the connection type to add
 * @param step the source step component connecting with the given
 *          connection type
 */
public void
  addIncomingConnection(String connectionName, StepManagerImpl step) {
  List<StepManager> connected = m_connectedByTypeIncoming.get(connectionName);
  if (connected == null) {
    connected = new ArrayList<StepManager>();
    m_connectedByTypeIncoming.put(connectionName, connected);
  }
  connected.add(step);
}
/**
 * Remove an incoming connection to this step of the specified type. Does
 * nothing if there are no incoming connections of the given type.
 *
 * @param connectionName the name of the type of connection to remove
 * @param step the source step component associated with the given connection
 *          type
 */
public void removeIncomingConnection(String connectionName,
  StepManagerImpl step) {
  List<StepManager> steps = m_connectedByTypeIncoming.get(connectionName);
  // guard: previously this NPE'd when no connections of this type existed
  if (steps != null) {
    steps.remove(step);
  }
}
/**
 * Add an outgoing connection of the specified type to the supplied target
 * step. The connection is made only if the target step will accept this
 * connection type at the present time. Equivalent to calling the three-arg
 * overload with {@code force = false}.
 *
 * @param connectionName the name of the type of connection to add
 * @param step the target step component receiving the given connection type
 * @return true if the connection was successful
 */
public boolean addOutgoingConnection(String connectionName,
  StepManagerImpl step) {
  return addOutgoingConnection(connectionName, step, false);
}
/**
 * Add an outgoing connection of the specified type to the supplied target
 * step. Unless forced, the connection is made only when the target step
 * reports that it can accept this connection type at the present time. On
 * success, a matching incoming connection is registered on the target.
 *
 * @param connectionName the name of the type of connection to add
 * @param step the target step component receiving the given connection type
 * @param force whether to force the connection, even if the target step says
 *          it can't accept the connection at the present time
 * @return true if the connection was successful
 */
public boolean addOutgoingConnection(String connectionName,
  StepManagerImpl step, boolean force) {
  List<String> accepted = step.getManagedStep().getIncomingConnectionTypes();
  if (!accepted.contains(connectionName) && !force) {
    return false;
  }
  // record the outgoing side on this step...
  List<StepManager> connected = m_connectedByTypeOutgoing.get(connectionName);
  if (connected == null) {
    connected = new ArrayList<StepManager>();
    m_connectedByTypeOutgoing.put(connectionName, connected);
  }
  // ...and the matching incoming side on the target
  step.addIncomingConnection(connectionName, this);
  connected.add(step);
  return true;
}
/**
 * Remove an outgoing connection from this step of the specified type. The
 * matching incoming connection on the target step is removed as well. Safe
 * to call when no outgoing connections of the given type exist.
 *
 * @param connectionName the name of the type of connection to remove
 * @param step the target step component associated with the given connection
 *          type
 */
public void removeOutgoingConnection(String connectionName,
  StepManagerImpl step) {
  List<StepManager> steps = m_connectedByTypeOutgoing.get(connectionName);
  // guard: previously this NPE'd when no connections of this type existed
  if (steps != null) {
    steps.remove(step);
  }
  // target step now loses an incoming connection
  step.removeIncomingConnection(connectionName, this);
}
/**
 * Get a list of steps providing incoming connections of the specified type.
 *
 * @param connectionName the type of connection being received by this step
 * @return a list of connected steps (never null; empty when there are no
 *         incoming connections of the given type)
 */
@Override
public List<StepManager> getIncomingConnectedStepsOfConnectionType(
  String connectionName) {
  // single lookup instead of the previous get-twice pattern
  List<StepManager> connected = m_connectedByTypeIncoming.get(connectionName);
  return connected != null ? connected : new ArrayList<StepManager>();
}
/**
 * Get a list of steps receiving outgoing connections of the specified type.
 *
 * @param connectionName the type of connection being produced by this step
 * @return a list of connected steps (never null; empty when there are no
 *         outgoing connections of the given type)
 */
@Override
public List<StepManager> getOutgoingConnectedStepsOfConnectionType(
  String connectionName) {
  // single lookup instead of the previous get-twice pattern
  List<StepManager> connected = m_connectedByTypeOutgoing.get(connectionName);
  return connected != null ? connected : new ArrayList<StepManager>();
}
/**
 * Find a connected step by the name of its managed step.
 *
 * @param stepName the name of the step to look for
 * @param connectedSteps map of connection type to connected step managers to
 *          search
 * @return the first matching step manager, or null if no connected step has
 *         the given name
 */
private StepManager getConnectedStepWithName(String stepName,
  Map<String, List<StepManager>> connectedSteps) {
  for (Map.Entry<String, List<StepManager>> e : connectedSteps.entrySet()) {
    for (StepManager s : e.getValue()) {
      if (((StepManagerImpl) s).getManagedStep().getName().equals(stepName)) {
        // return immediately - the original broke only out of the inner
        // loop and kept scanning the remaining connection types
        return s;
      }
    }
  }
  return null;
}
/**
 * Find the named step among those connected to this step via an incoming
 * connection (of any type).
 *
 * @param stepName the name of the step to look for
 * @return the connected step's manager, or null if not found
 */
@Override
public StepManager getIncomingConnectedStepWithName(String stepName) {
  return getConnectedStepWithName(stepName, m_connectedByTypeIncoming);
}
/**
 * Find the named step among those connected to this step via an outgoing
 * connection (of any type).
 *
 * @param stepName the name of the step to look for
 * @return the connected step's manager, or null if not found
 */
@Override
public StepManager getOutgoingConnectedStepWithName(String stepName) {
  return getConnectedStepWithName(stepName, m_connectedByTypeOutgoing);
}
/**
 * Get the map of downstream (outgoing connection) connected steps, keyed by
 * connection type.
 *
 * @return the map of downstream connected steps
 */
@Override
public Map<String, List<StepManager>> getOutgoingConnections() {
  return m_connectedByTypeOutgoing;
}
/**
 * Get the map of upstream (incoming connection) connected steps, keyed by
 * connection type.
 *
 * @return the map of upstream connected steps
 */
@Override
public Map<String, List<StepManager>> getIncomingConnections() {
  return m_connectedByTypeIncoming;
}
/**
 * Register a non-step third party to receive data from the managed step for
 * the specified outgoing connection type. Duplicate registrations of the
 * same listener are ignored. Output listeners are not serialized into the
 * JSON flow when flows are saved.
 *
 * @param listener the output listener to register
 * @param outputConnectionName the name of the connection type
 */
public void addStepOutputListener(StepOutputListener listener,
  String outputConnectionName) {
  List<StepOutputListener> registered =
    m_outputListeners.get(outputConnectionName);
  if (registered == null) {
    registered = new ArrayList<StepOutputListener>();
    m_outputListeners.put(outputConnectionName, registered);
  }
  if (!registered.contains(listener)) {
    registered.add(listener);
  }
}
/**
 * De-register a non-step third party from receiving data from the managed
 * step. Does nothing if the listener is not registered for the given
 * connection type.
 *
 * @param listener the output listener to de-register
 * @param outputConnectionName the name of the connection type the listener
 *          is registered against
 */
public void removeStepOutputListener(StepOutputListener listener,
  String outputConnectionName) {
  List<StepOutputListener> registered =
    m_outputListeners.get(outputConnectionName);
  if (registered != null) {
    registered.remove(listener);
  }
}
/**
 * De-register every StepOutputListener across all connection types.
 */
public void clearAllStepOutputListeners() {
  m_outputListeners.clear();
}
/**
 * De-register all StepOutputListeners registered against the supplied
 * connection type. Does nothing if no listeners are registered for it.
 *
 * @param outputConnectionName type of the connection to clear the listeners
 *          for
 */
public void clearStepOutputListeners(String outputConnectionName) {
  List<StepOutputListener> registered =
    m_outputListeners.get(outputConnectionName);
  if (registered != null) {
    registered.clear();
  }
}
/**
 * Pass the supplied Data object to any StepOutputListeners registered for
 * its connection type. Listeners that report unsuccessful processing are
 * logged as warnings (processing of remaining listeners continues).
 *
 * @param data the data to pass on
 * @throws WekaException if a listener fails catastrophically
 */
protected void notifyOutputListeners(Data data) throws WekaException {
  List<StepOutputListener> listenersForType =
    m_outputListeners.get(data.getConnectionName());
  if (listenersForType != null) {
    for (StepOutputListener l : listenersForType) {
      if (!l.dataFromStep(data)) {
        // fixed: message previously ended with a stray apostrophe
        // ("successfully'")
        logWarning("StepOutputListener '" + l.getClass().getCanonicalName()
          + "' " + "did not process data '" + data.getConnectionName()
          + "' successfully");
      }
    }
  }
}
/**
 * Output a Data object to all downstream connected Steps that are connected
 * with the supplied connection name. Sets the connection type on the supplied
 * Data object to the supplied connection name. Also notifies any registered
 * StepOutputListeners.
 *
 * @param outgoingConnectionName the type of the outgoing connection to send
 *          data to
 * @param data a single Data object to send
 * @throws WekaException
 */
@Override
public void outputData(String outgoingConnectionName, Data data)
  throws WekaException {
  // entire dispatch is skipped once a stop has been requested
  if (!isStopRequested()) {
    data.setConnectionName(outgoingConnectionName);
    data.setSourceStep(m_managedStep);
    List<StepManager> toNotify =
      m_connectedByTypeOutgoing.get(outgoingConnectionName);
    if (toNotify != null) {
      for (StepManager s : toNotify) {
        // re-check per target: a stop may arrive mid-dispatch
        if (!isStopRequested()) {
          m_executionEnvironment.sendDataToStep((StepManagerImpl) s, data);
        }
      }
    }
    // third-party listeners are notified even when no step is connected
    notifyOutputListeners(data);
  }
}
/**
 * Output one or more Data objects to all relevant steps. Populates the source
 * in each Data object for the client, HOWEVER, the client must have populated
 * the connection type in each Data object to be output so that the
 * StepManager knows which connected steps to send the data to. Also notifies
 * any registered {@code StepOutputListeners}. Note that the downstream
 * step(s)' processIncoming() method is called in a separate thread for batch
 * connections. Furthermore, if multiple Data objects are supplied via the
 * varargs argument, and a target step will receive more than one of the Data
 * objects, then they will be passed on to the step in question sequentially
 * within the same thread of execution.
 *
 * @param data one or more Data objects to be sent
 * @throws WekaException if a problem occurs (e.g. a Data object without a
 *           connection name set)
 */
@Override
public void outputData(Data... data) throws WekaException {
  if (!isStopRequested()) {
    // LinkedHashMap preserves the order in which targets are first seen so
    // that per-target delivery order matches the order of the data args
    Map<StepManagerImpl, List<Data>> stepsToSendTo =
      new LinkedHashMap<StepManagerImpl, List<Data>>();
    for (Data d : data) {
      d.setSourceStep(m_managedStep);
      if (d.getConnectionName() == null
        || d.getConnectionName().length() == 0) {
        throw new WekaException("Data does not have a connection name set.");
      }
      List<StepManager> candidates =
        m_connectedByTypeOutgoing.get(d.getConnectionName());
      if (candidates != null) {
        // group the data objects by target step
        for (StepManager s : candidates) {
          List<Data> toReceive = stepsToSendTo.get(s);
          if (toReceive == null) {
            toReceive = new ArrayList<Data>();
            stepsToSendTo.put((StepManagerImpl) s, toReceive);
          }
          toReceive.add(d);
        }
      }
      notifyOutputListeners(d);
    }
    // dispatch each target's batch in one call (sequential per target)
    for (Map.Entry<StepManagerImpl, List<Data>> e : stepsToSendTo.entrySet()) {
      if (!e.getKey().isStopRequested()) {
        m_executionEnvironment.sendDataToStep(e.getKey(), e.getValue()
          .toArray(new Data[e.getValue().size()]));
      }
    }
  }
}
/**
 * Outputs the supplied Data object to the named Step. Does nothing if the
 * named step is not connected immediately downstream of this Step via the
 * given connection type (or if there are no outgoing connections of that
 * type at all). Sets the supplied connection name on the Data object. Also
 * notifies any StepOutputListeners.
 *
 * @param outgoingConnectionName the name of the outgoing connection
 * @param stepName the name of the step to send the data to
 * @param data the data to send
 * @throws WekaException if a problem occurs
 */
@Override
public void outputData(String outgoingConnectionName, String stepName,
  Data data) throws WekaException {
  if (!isStopRequested()) {
    data.setConnectionName(outgoingConnectionName);
    data.setSourceStep(m_managedStep);
    List<StepManager> outConnsOfType =
      m_connectedByTypeOutgoing.get(outgoingConnectionName);
    StepManagerImpl namedTarget = null;
    // guard: previously this NPE'd when there were no outgoing connections
    // of the requested type
    if (outConnsOfType != null) {
      for (StepManager c : outConnsOfType) {
        if (((StepManagerImpl) c).getManagedStep().getName().equals(stepName)) {
          namedTarget = (StepManagerImpl) c;
        }
      }
    }
    if (namedTarget != null && !namedTarget.isStopRequested()) {
      m_executionEnvironment.sendDataToStep(namedTarget, data);
    } else {
      // TODO log an error here and stop?
    }
    notifyOutputListeners(data);
  }
}
/**
 * Start the managed step's processing by invoking its {@code start()}
 * method. Any throwable is logged via {@code logError()} and the step is
 * marked as no longer busy.
 */
protected void startStep() {
  try {
    getManagedStep().start();
  } catch (Throwable ex) {
    // covers WekaException and anything else thrown by start(); both were
    // previously handled identically in separate catch blocks
    interrupted();
    logError(ex.getMessage(), ex);
  }
}
/**
 * Stop the managed step's processing: raises the stop-requested flag and
 * then invokes the step's own stop() method.
 */
protected void stopStep() {
  m_stopRequested = true;
  getManagedStep().stop();
}
/**
 * Have the managed step process the supplied data object. Any throwable is
 * logged via {@code logError()} and the step is marked as no longer busy.
 *
 * @param data the data for the managed step to process
 */
protected void processIncoming(Data data) {
  try {
    getManagedStep().processIncoming(data);
  } catch (Throwable ex) {
    // covers WekaException and anything else thrown by processIncoming();
    // both were previously handled identically in separate catch blocks
    interrupted();
    logError(ex.getMessage(), ex);
  }
}
/**
 * Used by the rendering routine in LayoutPanel to ensure that connections
 * downstream from a deleted connection get rendered in grey rather than red.
 *
 * @return a list of outgoing connection types that the managed step can
 *         produce (adjusted to take into account any upstream broken
 *         connections)
 */
public List<String> getStepOutgoingConnectionTypes() {
  // temporarily raise the rendering-adjustment flag; the connection-count
  // methods consulted by getOutgoingConnectionTypes() behave differently
  // while it is set. NOTE(review): this flag protocol is not thread-safe -
  // presumably only called from the EDT; confirm
  m_adjustForGraphicalRendering = true;
  List<String> results = getManagedStep().getOutgoingConnectionTypes();
  m_adjustForGraphicalRendering = false;
  return results;
}
/**
 * Get the total number of incoming connections to the managed step, summed
 * over all connection types. When the graphical-rendering adjustment flag is
 * set, each type's count is adjusted according to what the upstream steps
 * can currently produce.
 *
 * @return the number of incoming connections
 */
@Override
public int numIncomingConnections() {
  int total = 0;
  for (Map.Entry<String, List<StepManager>> e : m_connectedByTypeIncoming
    .entrySet()) {
    total +=
      m_adjustForGraphicalRendering ? numIncomingConnectionsOfType(e.getKey())
        : e.getValue().size();
  }
  return total;
}
/**
 * Get the number of incoming connections to the managed step of a given
 * type. When the graphical-rendering adjustment flag is set, only upstream
 * steps that can currently produce this connection type are counted.
 *
 * @param connectionName the name of the connection type
 * @return the number of incoming connections of this type
 */
@Override
public int numIncomingConnectionsOfType(String connectionName) {
  List<StepManager> inOfType = m_connectedByTypeIncoming.get(connectionName);
  if (inOfType == null) {
    return 0;
  }
  if (!m_adjustForGraphicalRendering) {
    return inOfType.size();
  }
  // count only upstream steps that can still generate this connection type
  int num = 0;
  for (StepManager connS : inOfType) {
    List<String> generatableOutputCons =
      ((StepManagerImpl) connS).getStepOutgoingConnectionTypes();
    if (generatableOutputCons.contains(connectionName)) {
      num++;
    }
  }
  return num;
}
/**
 * Get the total number of outgoing connections from the managed step, summed
 * over all connection types. When the graphical-rendering adjustment flag is
 * set, one connection is subtracted per connection type.
 *
 * @return the number of outgoing connections (never negative)
 */
@Override
public int numOutgoingConnections() {
  int size = 0;
  for (Map.Entry<String, List<StepManager>> e : m_connectedByTypeOutgoing
    .entrySet()) {
    // NOTE(review): the rendering adjustment subtracts 1 per type inside the
    // loop, but the clamp to zero happens only once after the loop - confirm
    // whether a per-type clamp was intended
    size += e.getValue().size() - (m_adjustForGraphicalRendering ? 1 : 0);
  }
  if (size < 0) {
    size = 0;
  }
  return size;
}
/**
 * Get the number of outgoing connections from the managed step of a given
 * type. When the graphical-rendering adjustment flag is set, the count is
 * reduced by one.
 *
 * @param connectionName the name of the connection type
 * @return the number of outgoing connections of this type
 */
@Override
public int numOutgoingConnectionsOfType(String connectionName) {
  List<StepManager> outOfType = m_connectedByTypeOutgoing.get(connectionName);
  if (outOfType == null) {
    return 0;
  }
  return m_adjustForGraphicalRendering ? outOfType.size() - 1
    : outOfType.size();
}
/**
 * Attempt to get the incoming structure (as a header-only set of instances)
 * for the named incoming connection type. Assumes that there is exactly one
 * incoming connection of the named type; if there are zero, or more than
 * one, null is returned.
 *
 * @param connectionName the name of the incoming connection to get the
 *          structure for
 * @return the structure as a header-only set of instances, or null if there
 *         are zero or more than one upstream connected steps producing the
 *         named connection, or if the upstream step can't tell us the
 *         structure, or if the upstream step can't represent the structure
 *         of the connection type as a set of instances
 * @throws WekaException if a problem occurs
 */
@Override
public Instances getIncomingStructureForConnectionType(String connectionName)
  throws WekaException {
  List<StepManager> producers =
    getIncomingConnectedStepsOfConnectionType(connectionName);
  if (producers.size() != 1) {
    return null;
  }
  return ((StepManagerImpl) producers.get(0)).getManagedStep()
    .outputStructureForConnectionType(connectionName);
}
/**
 * Attempt to get the incoming structure (as a header-only set of instances)
 * from the given managed step for the given connection type.
 *
 * @param sourceStep the step manager managing the source step
 * @param connectionName the name of the connection to attempt to get the
 *          structure for
 * @return the structure as a header-only set of instances, or null if the
 *         source step can't determine this at present or if it can't be
 *         represented as a set of instances
 * @throws WekaException if a problem occurs
 */
@Override
public Instances getIncomingStructureFromStep(StepManager sourceStep,
  String connectionName) throws WekaException {
  Step source = ((StepManagerImpl) sourceStep).getManagedStep();
  return source.outputStructureForConnectionType(connectionName);
}
/**
 * Log a message at the low logging level. Silently does nothing when no log
 * has been configured.
 *
 * @param message the message to log
 */
@Override
public void logLow(String message) {
  if (m_log != null) {
    m_log.logLow(message);
  }
}
/**
 * Log a message at the basic logging level. Silently does nothing when no
 * log has been configured.
 *
 * @param message the message to log
 */
@Override
public void logBasic(String message) {
  if (m_log != null) {
    m_log.logBasic(message);
  }
}
/**
 * Log a message at the detailed logging level. Silently does nothing when no
 * log has been configured.
 *
 * @param message the message to log
 */
@Override
public void logDetailed(String message) {
  if (m_log != null) {
    m_log.logDetailed(message);
  }
}
/**
 * Log a message at the debugging logging level. Silently does nothing when
 * no log has been configured.
 *
 * @param message the message to log
 */
@Override
public void logDebug(String message) {
  if (m_log != null) {
    m_log.logDebug(message);
  }
}
/**
 * Log a warning message; the message is also echoed to the status area
 * prefixed with "WARNING: ". Silently does nothing when no log has been
 * configured.
 *
 * @param message the message to log
 */
@Override
public void logWarning(String message) {
  if (m_log != null) {
    m_log.logWarning(message);
    m_log.statusMessage("WARNING: " + message);
  }
}
/**
 * Log an error. Errors are treated as fatal: in addition to logging the
 * message (with the optional cause) and echoing it to the status area
 * prefixed with "ERROR: ", the whole execution environment is told to stop
 * processing.
 *
 * @param message the message to log
 * @param cause the optional Throwable to log
 */
@Override
public void logError(String message, Throwable cause) {
  if (m_log != null) {
    m_log.log(message, LoggingLevel.ERROR, cause);
    m_log.statusMessage("ERROR: " + message);
  }
  if (m_executionEnvironment != null) {
    // fatal error - make sure that everything stops.
    m_executionEnvironment.stopProcessing();
  }
}
/**
 * Output a status message to the status area of the log. Silently does
 * nothing when no log has been configured.
 *
 * @param message the message to output
 */
@Override
public void statusMessage(String message) {
  if (m_log != null) {
    m_log.statusMessage(message);
  }
}
/**
 * Log a message at the supplied logging level (with no associated cause).
 * Silently does nothing when no log has been configured.
 *
 * @param message the message to write
 * @param level the level for the message
 */
@Override
public void log(String message, LoggingLevel level) {
  if (m_log != null) {
    m_log.log(message, level, null);
  }
}
/**
 * Substitute the values of environment variables in the given string. Uses
 * the execution environment's variables when available, otherwise the
 * system-wide set. Unresolvable variables leave the string unchanged.
 *
 * @param source the source string to substitute in (may be null)
 * @return the source string with all known environment variables resolved,
 *         or the original string when substitution fails or source is null
 */
@Override
public String environmentSubstitute(String source) {
  if (source == null) {
    return null;
  }
  // prefer the execution environment's variables; fall back to system-wide
  Environment vars =
    getExecutionEnvironment() != null ? getExecutionEnvironment()
      .getEnvironmentVariables() : Environment.getSystemWide();
  try {
    return vars.substitute(source);
  } catch (Exception ex) {
    // unresolved variables are not an error - return the input unchanged
    return source;
  }
}
/**
 * Returns a reference to the step being managed if it has one or more
 * outgoing CON_INFO connections and the managed step is of the supplied
 * class.
 *
 * @param stepClass the expected class of the step
 * @return the step being managed if outgoing CON_INFO connections are
 *         present and the step is of the supplied class
 * @throws WekaException if there are no outgoing CON_INFO connections or the
 *           managed step is the wrong type
 */
@Override
public Step getInfoStep(Class stepClass) throws WekaException {
  Step info = getInfoStep();
  if (!(info.getClass() == stepClass)) {
    // fixed: message previously read "is not not an instance"
    throw new WekaException("The managed step ("
      + info.getClass().getCanonicalName() + ") is "
      + "not an instance of the required class: "
      + stepClass.getCanonicalName());
  }
  return info;
}
/**
 * Returns a reference to the step being managed, provided it has at least
 * one outgoing CON_INFO connection.
 *
 * @return the step being managed
 * @throws WekaException if there are no outgoing CON_INFO connections
 */
@Override
public Step getInfoStep() throws WekaException {
  if (numOutgoingConnectionsOfType(StepManager.CON_INFO) == 0) {
    throw new WekaException("There are no outgoing info connections from "
      + "this step!");
  }
  return getManagedStep();
}
/**
 * Look up a named step in the currently executing flow.
 *
 * @param stepNameToFind the name of the step to find
 * @return the StepManager of the named step, or null if the step does not
 *         exist in the current flow
 */
@Override
public StepManager findStepInFlow(String stepNameToFind) {
  return m_executionEnvironment.getFlowExecutor().getFlow()
    .findStep(stepNameToFind);
}
/**
 * Gets a prefix for the step managed by this manager. Used to uniquely
 * identify steps in the status area of the log. The format is
 * "name$hashCode|" optionally followed by the wrapped algorithm's option
 * string and another "|" for WekaAlgorithmWrapper steps - keep this format
 * stable, as the log display parses it.
 *
 * @return a unique prefix for the step managed by this manager
 */
public String stepStatusMessagePrefix() {
  String prefix =
    (getManagedStep() != null ? getManagedStep().getName() : "Unknown") + "$";
  // hashCode distinguishes same-named steps; falls back to 1 when unset
  prefix +=
    (getManagedStep() != null ? getManagedStep().hashCode() : 1) + "|";
  if (getManagedStep() instanceof WekaAlgorithmWrapper) {
    Object wrappedAlgo =
      ((WekaAlgorithmWrapper) getManagedStep()).getWrappedAlgorithm();
    // include the wrapped scheme's options so differently-configured copies
    // of the same algorithm get distinct prefixes
    if (wrappedAlgo instanceof OptionHandler) {
      prefix +=
        Utils.joinOptions(((OptionHandler) wrappedAlgo).getOptions()) + "|";
    }
  }
  return prefix;
}
/**
 * Return true if the supplied connection is an incremental one. Several
 * built-in connection types are always considered incremental: instance,
 * incremental_classifier, and chart. Clients can mark their own custom
 * connection/data as incremental by setting the payload element
 * "CON_AUX_DATA_IS_INCREMENTAL" to true in their Data object.
 *
 * @param conn the connection (Data object) to check
 * @return true if the supplied connection is an incremental connection
 */
protected static boolean connectionIsIncremental(Data conn) {
  String name = conn.getConnectionName();
  if (name.equalsIgnoreCase(StepManager.CON_INSTANCE)
    || name.equalsIgnoreCase(StepManager.CON_INCREMENTAL_CLASSIFIER)
    || name.equalsIgnoreCase(StepManager.CON_CHART)) {
    return true;
  }
  return conn.getPayloadElement(StepManager.CON_AUX_DATA_IS_INCREMENTAL,
    false);
}
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/StepOutputListener.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* StepOutputListener
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow;
import weka.core.WekaException;
/**
 * Interface to something that listens to the output from a {@code Step}.
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision: $
 */
public interface StepOutputListener {

  /**
   * Process data produced by a knowledge flow step.
   *
   * @param data the payload to process
   * @return true if processing was successful
   * @throws WekaException in the case of a catastrophic failure of the
   *           StepOutputListener
   */
  boolean dataFromStep(Data data) throws WekaException;
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/StepTask.java
|
package weka.knowledgeflow;
import weka.knowledgeflow.steps.Step;
import java.io.Serializable;
import java.util.concurrent.Callable;
/**
 * A unit of work that can be submitted to the ExecutionEnvironment's
 * submitTask() service. Steps wanting to execute work in parallel should
 * subclass this and can interact with it in one of two ways:<br>
 * <br>
 *
 * 1. Via the {@code Future<ExecutionResult>} handle returned by submitTask(),
 * or<br>
 * 2. By registering an implementation of StepTaskCallback when constructing
 * the StepTask subclass.<br>
 *
 * Subclasses of StepTask should place their results (and potentially errors)
 * into the provided ExecutionResult member variable (available by calling
 * getExecutionResult()).
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 *
 * @param <T> the type of the result stored in the returned ExecutionResult
 *          object
 */
public abstract class StepTask<T> implements Callable<ExecutionResult<T>>,
  Serializable {

  /** For serialization */
  private static final long serialVersionUID = 2995081029283027784L;

  /**
   * Holds the outcome of executing this task - ready to be populated by a
   * subclass's call() implementation
   */
  protected ExecutionResult<T> m_result = new ExecutionResult<T>();

  /** Optional callback to notify once processing has completed */
  protected transient StepTaskCallback<T> m_callback;

  /** Log to write to */
  protected LogManager m_log;

  /** Whether this task is heavy on resources (cpu/memory) */
  protected boolean m_resourceIntensive = true;

  /**
   * When true, only one of these tasks may be executing at any given time in
   * the Knowledge Flow/JVM. Takes priority over isResourceIntensive() and
   * routes the task to an executor service backed by a single worker thread.
   */
  protected boolean m_mustRunSingleThreaded;

  /**
   * The callback notifier delegate. Performs the actual notification back to
   * the step
   */
  protected CallbackNotifierDelegate m_callbackNotifier =
    new DefaultCallbackNotifierDelegate();

  /**
   * Constructor. Use this constructor when you intend to work with the Future
   * returned by ExecutionEnvironment.submitTask().
   *
   * @param source the source step producing this task
   */
  public StepTask(Step source) {
    this(source, null, false);
  }

  /**
   * Constructor. Use this constructor when you intend to work with the Future
   * returned by ExecutionEnvironment.submitTask().
   *
   * @param source the source step producing this task
   * @param resourceIntensive true if this task is cpu/memory intensive
   */
  public StepTask(Step source, boolean resourceIntensive) {
    this(source, null, resourceIntensive);
  }

  /**
   * Constructor with supplied callback. Use this constructor to be notified,
   * via the supplied callback, when the task has completed processing.
   *
   * @param source the source step producing this task
   * @param callback the callback to use
   */
  public StepTask(Step source, StepTaskCallback<T> callback) {
    this(source, callback, false);
  }

  /**
   * Constructor with supplied callback. Use this constructor to be notified,
   * via the supplied callback, when the task has completed processing.
   *
   * @param source the source step producing this task
   * @param callback the callback to use
   * @param resourceIntensive true if this task is cpu/memory intensive
   */
  public StepTask(Step source, StepTaskCallback<T> callback,
    boolean resourceIntensive) {
    m_log = new LogManager(source);
    m_callback = callback;
    m_resourceIntensive = resourceIntensive;
  }

  /**
   * Set whether this {@code StepTask} is resource intensive (cpu/memory) or
   * not. By default, a {@code StepTask} is resource intensive.
   *
   * @param resourceIntensive false if this {@code StepTask} is not resource
   *          intensive
   */
  public void setResourceIntensive(boolean resourceIntensive) {
    m_resourceIntensive = resourceIntensive;
  }

  /**
   * Get whether this {@code StepTask} is resource intensive (cpu/memory) or
   * not. By default, a {@code StepTask} is resource intensive.
   *
   * @return false if this {@code StepTask} is not resource intensive
   */
  public boolean isResourceIntensive() {
    return m_resourceIntensive;
  }

  /**
   * Set whether this {@code StepTask} must run single threaded - i.e. only
   * one of these tasks is executing at any one time in the JVM. The Knowledge
   * Flow uses a special executor service with a single worker thread to
   * execute these tasks. This property, if true, overrides
   * isResourceIntensive().
   *
   * @param singleThreaded true if this task must run single threaded
   */
  public void setMustRunSingleThreaded(boolean singleThreaded) {
    m_mustRunSingleThreaded = singleThreaded;
  }

  /**
   * Get whether this {@code StepTask} must run single threaded - i.e. only
   * one of these tasks is executing at any one time in the JVM. The Knowledge
   * Flow uses a special executor service with a single worker thread to
   * execute these tasks. This property, if true, overrides
   * isResourceIntensive().
   *
   * @return true if this task must run single threaded
   */
  public boolean getMustRunSingleThreaded() {
    return m_mustRunSingleThreaded;
  }

  /**
   * Get the callback notifier delegate in use. Used by the execution
   * environment; not normally of interest to subclasses.
   *
   * @return the callback notifier delegate in use
   */
  protected final CallbackNotifierDelegate getCallbackNotifierDelegate() {
    return m_callbackNotifier;
  }

  /**
   * Set the callback notifier delegate to use. Used by the execution
   * environment; not normally of interest to subclasses.
   *
   * @param delegate the delegate to use
   */
  protected final void setCallbackNotifierDelegate(
    CallbackNotifierDelegate delegate) {
    m_callbackNotifier = delegate;
  }

  /**
   * Get the LogHandler to use for logging.
   *
   * @return the LogHandler
   */
  protected final LogManager getLogHandler() {
    return m_log;
  }

  /**
   * Set the logger to use. Called by the execution environment - subclasses
   * should use getLogHandler() to do logging.
   *
   * @param log the log to use
   */
  protected final void setLogHandler(LogManager log) {
    m_log = log;
  }

  /**
   * Notifies the registered callback (if any).
   *
   * @throws Exception if a problem occurs
   */
  protected final void notifyCallback() throws Exception {
    if (m_callback != null) {
      m_callbackNotifier.notifyCallback(m_callback, this, m_result);
    }
  }

  /**
   * Get the result of execution.
   *
   * @return the result of execution
   */
  protected final ExecutionResult<T> getExecutionResult() {
    return m_result;
  }

  /**
   * Set the result of execution.
   *
   * @param execResult the result of execution
   */
  protected final void setExecutionResult(ExecutionResult<T> execResult) {
    m_result = execResult;
  }

  /**
   * Invoked by the executor service to do the work. Any exception raised by
   * process() is captured in the ExecutionResult rather than propagated.
   *
   * @return the results of execution in an ExecutionResult
   */
  @Override
  public ExecutionResult<T> call() throws Exception {
    try {
      process();
    } catch (Exception ex) {
      m_result.setError(ex);
    }
    notifyCallback();
    return getExecutionResult();
  }

  /**
   * The actual work gets done here. Subclasses to override. Subclasses can
   * use getExecutionResult() to obtain an ExecutionResult object to store
   * their results in.
   */
  public abstract void process() throws Exception;
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/StepTaskCallback.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* StepTaskCallback
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow;
/**
 * Callback that Steps can use when executing StepTasks via
 * EnvironmentManager.submitTask().
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 *
 * @param <T> the result return type (gets encapsulated in an ExecutionResult)
 */
public interface StepTaskCallback<T> {

  /**
   * Gets called when the {@code StepTask} finishes processing.
   *
   * @param result the {@code ExecutionResult} produced by the task
   * @throws Exception if a problem occurs
   */
  public void taskFinished(ExecutionResult<T> result) throws Exception;

  /**
   * Gets called if the {@code StepTask} fails for some reason.
   *
   * @param failedTask the {@code StepTask} that failed
   * @param failedResult the {@code ExecutionResult} produced by
   *          the failed task (might contain information pertaining to the
   *          failure)
   * @throws Exception if a problem occurs
   */
  public void
    taskFailed(StepTask<T> failedTask, ExecutionResult<T> failedResult)
      throws Exception;
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/ASEvaluator.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* ASEvaluator.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.attributeSelection.ASEvaluation;
import weka.attributeSelection.ASSearch;
import weka.attributeSelection.AttributeEvaluator;
import weka.attributeSelection.AttributeSelection;
import weka.attributeSelection.AttributeTransformer;
import weka.attributeSelection.RankedOutputSearch;
import weka.attributeSelection.Ranker;
import weka.attributeSelection.SubsetEvaluator;
import weka.attributeSelection.UnsupervisedAttributeEvaluator;
import weka.attributeSelection.UnsupervisedSubsetEvaluator;
import weka.core.Instances;
import weka.core.OptionHandler;
import weka.core.OptionMetadata;
import weka.core.Utils;
import weka.core.WekaException;
import weka.filters.unsupervised.attribute.Remove;
import weka.gui.ProgrammaticProperty;
import weka.gui.knowledgeflow.StepVisual;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Step that wraps a Weka attribute or subset evaluator. Handles training and
 * test set connections. Requires an ASSearchStrategy step to be connected via
 * an "info" connection. Will output both attribute selection results (via text
 * connections) and transformed data (via outgoing train or test set
 * connections). When processing multiple incoming training and test folds, the
 * step can either output a cross-validation style summary over all the folds or
 * individual attribute selection results for each fold.
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision: $
 * @see ASSearchStrategy
 */
@KFStep(name = "ASEvaluator", category = "AttSelection",
  toolTipText = "Weka attribute selection evaluator wrapper", iconPath = "",
  resourceIntensive = true)
public class ASEvaluator extends WekaAlgorithmWrapper {

  private static final long serialVersionUID = -1280208826860871742L;

  /** The evaluator (attribute or subset) being used */
  protected ASEvaluation m_evaluatorTemplate;

  /**
   * The search strategy being used (retrieved via an incoming "info"
   * connection)
   */
  protected ASSearch m_searchTemplate;

  /**
   * Any test folds waiting to be processed (i.e. have their dimensionality
   * reduced). Keyed by set/fold number (-1 when no set number is present).
   */
  protected Map<Integer, Instances> m_waitingTestData =
    new HashMap<Integer, Instances>();

  /** Holds selected attribute indices corresponding to training folds */
  protected Map<Integer, int[]> m_selectedAttsStore =
    new HashMap<Integer, int[]>();

  /**
   * Holds the calculated number of attributes to select (may depend on
   * thresholds) for each training fold
   */
  protected Map<Integer, Integer> m_numToSelectStore =
    new HashMap<Integer, Integer>();

  /**
   * Holds the evaluator trained per fold in the case when it is a transformer
   * (such as PCA)
   */
  protected Map<Integer, AttributeTransformer> m_transformerStore =
    new HashMap<Integer, AttributeTransformer>();

  /** True if we've been reset */
  protected boolean m_isReset;

  /**
   * True if we are processing cross-validation folds to produce a summary over
   * the folds (as opposed to producing separate results per fold).
   */
  protected boolean m_isDoingXVal;

  /** Keeps count of the folds still to be processed */
  protected AtomicInteger m_setCount;

  /**
   * Whether to output separate evaluation results for each fold of a xval or
   * report the cross-validation summary
   */
  protected boolean m_treatXValFoldsSeparately;

  /** Whether a ranking is being produced by the attribute selection */
  protected boolean m_isRanking;

  /**
   * Eval to use when performing a cross-validation and not outputting separate
   * results for each fold
   */
  protected AttributeSelection m_eval;

  /**
   * Get the class of Weka algorithm wrapped by this wrapper
   *
   * @return the wrapped algorithm class
   */
  @Override
  public Class getWrappedAlgorithmClass() {
    return weka.attributeSelection.ASEvaluation.class;
  }

  /**
   * Set an instance of the wrapped algorithm to use
   *
   * @param algo the algorithm to use
   */
  @Override
  public void setWrappedAlgorithm(Object algo) {
    super.setWrappedAlgorithm(algo);
    m_defaultIconPath = StepVisual.BASE_ICON_PATH
      + "filters.supervised.attribute.AttributeSelection.gif";
  }

  /**
   * Get the evaluator wrapped by this step
   *
   * @return the attribute or subset evaluator wrapped by this step
   */
  public ASEvaluation getEvaluator() {
    return (weka.attributeSelection.ASEvaluation) getWrappedAlgorithm();
  }

  /**
   * Set the evaluator to wrap (just calls setWrappedAlgorithm)
   *
   * @param eval the evaluator to use
   */
  @ProgrammaticProperty
  public void setEvaluator(ASEvaluation eval) {
    setWrappedAlgorithm(eval);
  }

  /**
   * Set whether to output separate results for each fold of a cross-validation,
   * rather than averaging over folds.
   *
   * @param treatSeparately true if each fold will have results output
   */
  @OptionMetadata(displayName = "Treat x-val folds separately",
    description = "Output separate attribute selection results for each fold "
      + "of a cross-validation (rather than averaging across folds)")
  public void setTreatXValFoldsSeparately(boolean treatSeparately) {
    m_treatXValFoldsSeparately = treatSeparately;
  }

  /**
   * Get whether to output separate results for each fold of a cross-validation,
   * rather than averaging over folds.
   *
   * @return true if each fold will have results output
   */
  public boolean getTreatXValFoldsSeparately() {
    return m_treatXValFoldsSeparately;
  }

  /**
   * Initialize at the start of a run
   *
   * @throws WekaException if there is an illegal configuration (i.e. Ranker
   *           search with subset evaluator or regular search with attribute
   *           evaluator)
   */
  @Override
  public void stepInit() throws WekaException {
    if (!(getWrappedAlgorithm() instanceof ASEvaluation)) {
      throw new WekaException("Incorrect type of algorithm");
    }
    try {
      m_evaluatorTemplate =
        ASEvaluation.makeCopies((ASEvaluation) getWrappedAlgorithm(), 1)[0];
    } catch (Exception ex) {
      throw new WekaException(ex);
    }
    List<StepManager> infos = getStepManager()
      .getIncomingConnectedStepsOfConnectionType(StepManager.CON_INFO);
    if (infos.size() == 0) {
      throw new WekaException(
        "A search strategy needs to be supplied via an 'info' "
          + "connection type");
    }
    ASSearchStrategy searchStrategy =
      (ASSearchStrategy) infos.get(0).getInfoStep(ASSearchStrategy.class);

    m_searchTemplate = searchStrategy.getSearchStrategy();
    if (m_searchTemplate instanceof RankedOutputSearch) {
      m_isRanking =
        ((RankedOutputSearch) m_searchTemplate).getGenerateRanking();
    }

    // Ranker only works with per-attribute evaluators, and vice versa
    if (m_evaluatorTemplate instanceof SubsetEvaluator
      && m_searchTemplate instanceof Ranker) {
      throw new WekaException(
        "The Ranker search strategy cannot be used with a "
          + "subset evaluator");
    }

    if (m_evaluatorTemplate instanceof AttributeEvaluator
      && !(m_searchTemplate instanceof Ranker)) {
      throw new WekaException("The Ranker search strategy must be used in "
        + "conjunction with an attribute evaluator");
    }

    m_isReset = true;
    m_waitingTestData.clear();
    m_selectedAttsStore.clear();
    m_numToSelectStore.clear();
    m_transformerStore.clear();
    m_eval = new AttributeSelection();
  }

  /**
   * Process an incoming Data object
   *
   * @param data the data object to process
   * @throws WekaException if a problem occurs
   */
  @Override
  public void processIncoming(Data data) throws WekaException {
    Instances train = data.getPayloadElement(StepManager.CON_TRAININGSET);
    Instances test = data.getPayloadElement(StepManager.CON_TESTSET);
    Integer setNum = data.getPayloadElement(StepManager.CON_AUX_DATA_SET_NUM);
    Integer maxSetNum =
      data.getPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM);

    if (m_isReset) {
      m_isReset = false;
      getStepManager().processing();
      m_setCount = new AtomicInteger(maxSetNum != null ? maxSetNum : 1);
      if (setNum != null && maxSetNum != null) {
        m_isDoingXVal = maxSetNum > 1 && !m_treatXValFoldsSeparately;
        if (m_evaluatorTemplate instanceof AttributeTransformer && m_isDoingXVal
          && !m_treatXValFoldsSeparately) {
          throw new WekaException(
            "Can't cross-validate an attribute transformer");
        }
        if (m_isDoingXVal) {
          m_eval.setFolds(maxSetNum);
        }
        if (m_isRanking) {
          m_eval.setRanking(m_isRanking);
        }
      }
    }

    if (m_isDoingXVal) {
      processXVal(train, test, setNum, maxSetNum);
    } else {
      processNonXVal(train, test, setNum, maxSetNum);
    }

    if (isStopRequested()) {
      getStepManager().interrupted();
    } else if (m_setCount.get() == 0) {
      if (m_isDoingXVal) {
        // output xval summary
        try {
          StringBuilder builder = new StringBuilder();
          // Print the search strategy under "Search method:" and the
          // evaluator under "Evaluator:" (these labels were previously
          // swapped)
          builder.append("Search method: ");
          String searchS = m_searchTemplate.getClass().getCanonicalName();
          searchS =
            searchS.substring(searchS.lastIndexOf('.') + 1, searchS.length());
          builder.append(searchS).append(" ")
            .append(m_searchTemplate instanceof OptionHandler ? Utils
              .joinOptions(((OptionHandler) m_searchTemplate).getOptions())
              : "")
            .append("\nEvaluator: ");
          String evalS = m_evaluatorTemplate.getClass().getCanonicalName();
          evalS = evalS.substring(evalS.lastIndexOf('.') + 1, evalS.length());
          builder.append(evalS).append(" ")
            .append(m_evaluatorTemplate instanceof OptionHandler ? Utils
              .joinOptions(((OptionHandler) m_evaluatorTemplate).getOptions())
              : "")
            .append("\n");
          builder.append(m_eval.CVResultsString());
          outputTextData(builder.toString(), null);
        } catch (Exception ex) {
          throw new WekaException(ex);
        }
      }
      getStepManager().finished();

      // save memory
      m_waitingTestData.clear();
      m_selectedAttsStore.clear();
      m_numToSelectStore.clear();
    }
  }

  /**
   * Output Data to outgoing text connections
   *
   * @param text the text to output
   * @param setNum the fold/set number that this text is associated with
   * @throws WekaException if a problem occurs
   */
  protected void outputTextData(String text, Integer setNum)
    throws WekaException {
    if (isStopRequested()) {
      return;
    }
    if (getStepManager()
      .numOutgoingConnectionsOfType(StepManager.CON_TEXT) == 0) {
      return;
    }
    Data textData = new Data(StepManager.CON_TEXT, text);
    // Title is "<EvaluatorName> (<SearchName>)"
    String titleString = m_evaluatorTemplate.getClass().getCanonicalName();
    titleString = titleString.substring(titleString.lastIndexOf('.') + 1,
      titleString.length());
    String searchString = m_searchTemplate.getClass().getCanonicalName();
    searchString = searchString.substring(searchString.lastIndexOf('.') + 1,
      searchString.length());
    titleString += " (" + searchString + ")";
    textData.setPayloadElement(StepManager.CON_AUX_DATA_TEXT_TITLE,
      titleString);
    if (setNum != null) {
      textData.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, setNum);
    }
    getStepManager().outputData(textData);
  }

  /**
   * Handles processing for single train sets, single train/test pairs or
   * cross-validation folds when results are output for each separately
   *
   * @param train the training data
   * @param test the test data
   * @param setNum the set number for the training/test data
   * @param maxSetNum the maximum set number
   * @throws WekaException if a problem occurs
   */
  protected void processNonXVal(Instances train, Instances test, Integer setNum,
    Integer maxSetNum) throws WekaException {
    if (train != null) {
      try {
        AttributeSelection eval = new AttributeSelection();
        ASEvaluation evalCopy =
          ASEvaluation.makeCopies(m_evaluatorTemplate, 1)[0];
        ASSearch searchCopy = ASSearch.makeCopies(m_searchTemplate, 1)[0];
        eval.setEvaluator(evalCopy);
        eval.setSearch(searchCopy);
        eval.setRanking(m_isRanking);
        if (!isStopRequested()) {
          String message = "Selecting attributes (" + train.relationName();
          if (setNum != null && maxSetNum != null) {
            message += ", set " + setNum + " of " + maxSetNum;
          }
          message += ")";
          getStepManager().statusMessage(message);
          getStepManager().logBasic(message);
          eval.SelectAttributes(train);
          if (evalCopy instanceof AttributeTransformer) {
            m_transformerStore.put(setNum != null ? setNum : -1,
              ((AttributeTransformer) evalCopy));
          }

          // this will be the final set of selected (and potentially ranked)
          // attributes including class attribute (if appropriate)
          int[] selectedAtts = eval.selectedAttributes();
          if (m_isRanking) {
            m_numToSelectStore.put(setNum != null ? setNum : -1,
              ((RankedOutputSearch) searchCopy).getCalculatedNumToSelect());
          }
          // > 2 here as the info connection counts as 1
          if (getStepManager().numIncomingConnections() > 2) {
            m_selectedAttsStore.put(setNum != null ? setNum : -1, selectedAtts);
          }
          String results = eval.toResultsString();
          outputTextData(results, setNum);
          applyFiltering(StepManager.CON_TRAININGSET, selectedAtts, train,
            setNum, maxSetNum);

          // > 2 here because the info connection counts as 1
          if (getStepManager().numIncomingConnections() > 2) {
            Instances waitingTest =
              m_waitingTestData.get(setNum != null ? setNum : -1);
            if (waitingTest != null) {
              checkTestFiltering(waitingTest, setNum != null ? setNum : -1,
                maxSetNum);
            }
          } else {
            m_setCount.decrementAndGet();
          }
          evalCopy.clean();
        }
      } catch (Exception ex) {
        throw new WekaException(ex);
      }
    } else {
      checkTestFiltering(test, setNum != null ? setNum : -1, maxSetNum);
    }
  }

  /**
   * Handles processing in the cross-validation case when results are to be
   * accumulated/averaged over the folds
   *
   * @param train the training data
   * @param test the test data
   * @param setNum the set number of this train/test pair
   * @param maxSetNum the maximum set number
   * @throws WekaException if a problem occurs
   */
  protected void processXVal(Instances train, Instances test, Integer setNum,
    Integer maxSetNum) throws WekaException {
    if (train != null) {
      try {
        ASEvaluation evalCopy =
          ASEvaluation.makeCopies(m_evaluatorTemplate, 1)[0];
        ASSearch searchCopy = ASSearch.makeCopies(m_searchTemplate, 1)[0];
        if (!isStopRequested()) {
          String message =
            "Selecting attributes x-val mode (" + train.relationName();
          if (setNum != null && maxSetNum != null) {
            message += ", set " + setNum + " of " + maxSetNum;
          }
          message += ")";
          getStepManager().statusMessage(message);
          getStepManager().logBasic(message);
          evalCopy.buildEvaluator(train);
          if (evalCopy instanceof AttributeTransformer) {
            m_transformerStore.put(setNum != null ? setNum : -1,
              ((AttributeTransformer) evalCopy));
          }
          int[] selectedAtts = searchCopy.search(evalCopy, train);
          selectedAtts = evalCopy.postProcess(selectedAtts);

          if (m_isRanking) {
            double[][] ranked =
              ((RankedOutputSearch) searchCopy).rankedAttributes();
            selectedAtts = new int[ranked.length];
            for (int i = 0; i < ranked.length; i++) {
              selectedAtts[i] = (int) ranked[i][0];
            }
          }
          updateXValStats(train, evalCopy, searchCopy, selectedAtts);

          // > 2 here because the info connection counts as 1
          if (getStepManager().numIncomingConnections() > 2) {
            m_selectedAttsStore.put(setNum, selectedAtts);
          }
          if (m_isRanking) {
            m_numToSelectStore.put(setNum,
              ((RankedOutputSearch) searchCopy).getCalculatedNumToSelect());
          }
          applyFiltering(StepManager.CON_TRAININGSET, selectedAtts, train,
            setNum, maxSetNum);

          // > 2 here because the info connection counts as 1
          if (getStepManager().numIncomingConnections() > 2) {
            Instances waitingTest = m_waitingTestData.get(setNum);
            if (waitingTest != null) {
              checkTestFiltering(waitingTest, setNum, maxSetNum);
            }
          } else {
            m_setCount.decrementAndGet();
          }
          evalCopy.clean();
        }
      } catch (Exception ex) {
        throw new WekaException(ex);
      }
    } else {
      checkTestFiltering(test, setNum, maxSetNum);
    }
  }

  /**
   * Check to see if there is a waiting set of selected attributes that can be
   * used to reduce the dimensionality of the supplied test set
   *
   * @param test the test set to potentially filter
   * @param setNum the set number of the test set
   * @param maxSetNum the maximum set number
   * @throws WekaException if a problem occurs
   */
  protected synchronized void checkTestFiltering(Instances test, Integer setNum,
    Integer maxSetNum) throws WekaException {
    if (isStopRequested()) {
      return;
    }
    int[] selectedForSet = m_selectedAttsStore.get(setNum);
    if (selectedForSet == null) {
      // training for this fold not seen yet - park the test data
      m_waitingTestData.put(setNum, test);
    } else {
      applyFiltering(StepManager.CON_TESTSET, selectedForSet, test, setNum,
        maxSetNum);
      m_setCount.decrementAndGet();
    }
  }

  /**
   * Apply a filter to reduce the dimensionality of the supplied data. Outputs
   * the reduced data to downstream steps on the given connection type
   *
   * @param connType the connection type to output on
   * @param selectedAtts selected attribute indices to use when filtering
   * @param data the instances to filter
   * @param setNum the set number of the instances
   * @param maxSetNum the maximum set number
   * @throws WekaException if a problem occurs
   */
  protected void applyFiltering(String connType, int[] selectedAtts,
    Instances data, Integer setNum, Integer maxSetNum) throws WekaException {
    if (isStopRequested()) {
      return;
    }
    if (getStepManager().numOutgoingConnectionsOfType(connType) == 0) {
      return;
    }

    int[] finalSet = new int[selectedAtts.length];
    // the class index needs to be appended manually when the search/ranking
    // did not already include it in the selected set
    boolean adjust = (m_isDoingXVal || m_isRanking)
      && ((!(m_evaluatorTemplate instanceof UnsupervisedSubsetEvaluator)
        && !(m_evaluatorTemplate instanceof UnsupervisedAttributeEvaluator))
        || m_evaluatorTemplate instanceof AttributeTransformer);
    if (m_isRanking) {
      int numToSelect = m_numToSelectStore.get(setNum != null ? setNum : -1);
      finalSet = new int[numToSelect];
      if (data.classIndex() >= 0) {
        if (adjust) {
          // one more for the class
          finalSet = new int[numToSelect + 1];
          finalSet[numToSelect] = data.classIndex();
        } else {
          finalSet = new int[numToSelect];
        }
      }
      for (int i = 0; i < numToSelect; i++) {
        finalSet[i] = selectedAtts[i];
      }
    } else {
      if (adjust) {
        // one more for the class
        finalSet = new int[selectedAtts.length + 1];
        finalSet[selectedAtts.length] = data.classIndex();
      }
      for (int i = 0; i < selectedAtts.length; i++) {
        finalSet[i] = selectedAtts[i];
      }
    }

    try {
      Instances reduced = null;
      AttributeTransformer transformer =
        m_transformerStore.get(setNum != null ? setNum : -1);
      if (transformer != null) {
        // transformers (e.g. PCA) produce data in a new space rather than
        // simply removing attributes
        reduced =
          new Instances(transformer.transformedHeader(), data.numInstances());
        for (int i = 0; i < data.numInstances(); i++) {
          reduced.add(transformer.convertInstance(data.instance(i)));
        }
      } else {
        Remove r = new Remove();
        r.setAttributeIndicesArray(finalSet);
        r.setInvertSelection(true);
        r.setInputFormat(data);
        reduced = weka.filters.Filter.useFilter(data, r);
      }
      if (!isStopRequested()) {
        String message = "Filtering " + connType + " (" + data.relationName();
        if (setNum != null && maxSetNum != null) {
          message += ", set " + setNum + " of " + maxSetNum;
        }
        message += ")";
        getStepManager().statusMessage(message);
        getStepManager().logBasic(message);
        Data output = new Data(connType, reduced);
        output.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, setNum);
        output.setPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM,
          maxSetNum);
        getStepManager().outputData(output);
      }
    } catch (Exception ex) {
      throw new WekaException(ex);
    }
  }

  /**
   * Updates stats in the cross-validation case
   *
   * @param train the training data processed
   * @param evaluator the evaluator used
   * @param search the search strategy
   * @param selectedAtts the attributes selected on this training data
   * @throws Exception if a problem occurs
   */
  protected synchronized void updateXValStats(Instances train,
    ASEvaluation evaluator, ASSearch search, int[] selectedAtts)
    throws Exception {
    m_eval.updateStatsForModelCVSplit(train, evaluator, search, selectedAtts,
      m_isRanking);
  }

  /**
   * Get incoming connections accepted given the current state of the step
   *
   * @return a list of acceptable incoming connections
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    List<String> result = new ArrayList<String>();
    if (getStepManager()
      .numIncomingConnectionsOfType(StepManager.CON_TRAININGSET) == 0) {
      result.add(StepManager.CON_TRAININGSET);
    }
    if (getStepManager()
      .numIncomingConnectionsOfType(StepManager.CON_TESTSET) == 0
      && getStepManager()
        .numIncomingConnectionsOfType(StepManager.CON_TRAININGSET) == 1) {
      result.add(StepManager.CON_TESTSET);
    }
    if (getStepManager()
      .numIncomingConnectionsOfType(StepManager.CON_INFO) == 0) {
      result.add(StepManager.CON_INFO);
    }
    return result;
  }

  /**
   * Get a list of output connections that can be produced given the current
   * state of the step
   *
   * @return a list of output connections
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    List<String> result = new ArrayList<String>();
    if (getStepManager().numIncomingConnections() > 1 && getStepManager()
      .numIncomingConnectionsOfType(StepManager.CON_INFO) == 1) {
      result.add(StepManager.CON_TEXT);
    }
    if (getStepManager()
      .numIncomingConnectionsOfType(StepManager.CON_TRAININGSET) == 1
      && getStepManager()
        .numIncomingConnectionsOfType(StepManager.CON_INFO) == 1) {
      result.add(StepManager.CON_TRAININGSET);
    }
    if (getStepManager()
      .numIncomingConnectionsOfType(StepManager.CON_TESTSET) == 1
      && getStepManager()
        .numIncomingConnectionsOfType(StepManager.CON_INFO) == 1) {
      result.add(StepManager.CON_TESTSET);
    }
    return result;
  }

  /**
   * Get the class name of the custom editor for this step
   *
   * @return the class name of the custom editor for this step
   */
  @Override
  public String getCustomEditorForStep() {
    return "weka.gui.knowledgeflow.steps.ASEvaluatorStepEditorDialog";
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/ASSearchStrategy.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* ASSearchStrategy.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import weka.attributeSelection.ASSearch;
import weka.gui.ProgrammaticProperty;
import weka.gui.knowledgeflow.StepVisual;
import weka.knowledgeflow.StepManager;
/**
* Step that wraps a Weka attribute selection search strategy. This is just an
 * "info" step - i.e. it needs to be connected (via a StepManager.CON_INFO
 * connection) to an ASEvaluator step.
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
* @see ASEvaluator
*/
@KFStep(name = "ASSearchStrategy", category = "AttSelection",
  toolTipText = "Weka attribute selection search wrapper", iconPath = "")
public class ASSearchStrategy extends WekaAlgorithmWrapper {

  /** For serialization */
  private static final long serialVersionUID = 5038697382280884975L;

  /**
   * Initialize the step. This is a pure "info" step, so there is nothing to
   * set up.
   */
  @Override
  public void stepInit() {
    // no initialization required
  }

  /**
   * Get the incoming connection types accepted by this step. Info-only steps
   * do not accept any incoming connections.
   *
   * @return an empty list of connection types
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    return new ArrayList<String>();
  }

  /**
   * Get the outgoing connection types produced by this step - only "info"
   * connections are generated.
   *
   * @return a list containing just the info connection type
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    return Arrays.asList(StepManager.CON_INFO);
  }

  /**
   * Get the class of the algorithm wrapped by this wrapper step (ASSearch in
   * this case).
   *
   * @return the class of the wrapped algorithm
   */
  @Override
  public Class getWrappedAlgorithmClass() {
    return ASSearch.class;
  }

  /**
   * Set the actual algorithm wrapped by this instance and configure the
   * default icon to display for it.
   *
   * @param algo the algorithm wrapped
   */
  @Override
  public void setWrappedAlgorithm(Object algo) {
    super.setWrappedAlgorithm(algo);
    m_defaultIconPath = StepVisual.BASE_ICON_PATH
      + "filters.supervised.attribute.AttributeSelection.gif";
  }

  /**
   * Set the search strategy wrapped by this step (delegates to
   * setWrappedAlgorithm)
   *
   * @param searchStrategy the search strategy to wrap
   */
  @ProgrammaticProperty
  public void setSearchStrategy(ASSearch searchStrategy) {
    setWrappedAlgorithm(searchStrategy);
  }

  /**
   * Get the search strategy wrapped by this step
   *
   * @return the wrapped search strategy
   */
  public ASSearch getSearchStrategy() {
    return (ASSearch) getWrappedAlgorithm();
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/AlterRelationName.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* AlterRelationName.java
* Copyright (C) 2016 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.OptionMetadata;
import weka.core.WekaException;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
/**
* Step that alters the relation name for data received via instance, dataSet,
* trainingSet and testSet connections
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(name = "AlterRelationName", category = "Flow",
  toolTipText = "Alter the relation name in data sets",
  iconPath = KFGUIConsts.BASE_ICON_PATH + "DiamondPlain.gif")
public class AlterRelationName extends BaseStep {

  private static final long serialVersionUID = 5894383194664583303L;

  /**
   * Keys (source step name + connection identifier) for which the relation
   * name has already been modified - ensures each incoming batch/stream is
   * altered exactly once
   */
  protected Set<String> m_hasAltered;

  /** Text to modify the relation name with */
  protected String m_relationNameModText = "";

  /** The type of modification to make */
  protected ModType m_modType = ModType.REPLACE;

  /** Compiled pattern for regex replacement (null unless REGEX mode is set) */
  protected Pattern m_regexPattern;

  /** Regex string to match */
  protected String m_regexMatch = "";

  /** Whether to replace all regex matches, or just the first */
  protected boolean m_replaceAll;

  /**
   * Initialize the step. Pre-compiles the regular expression if a REGEX
   * modification has been configured.
   *
   * @throws WekaException if a problem occurs (e.g. invalid regex syntax)
   */
  @Override
  public void stepInit() throws WekaException {
    m_hasAltered = new HashSet<>();

    if (m_modType == ModType.REGEX && m_relationNameModText != null
      && m_relationNameModText.length() > 0 && m_regexMatch != null
      && m_regexMatch.length() > 0) {
      try {
        m_regexPattern = Pattern.compile(m_regexMatch);
      } catch (Exception ex) {
        // surface bad regex syntax as a flow configuration problem rather
        // than an unchecked PatternSyntaxException
        throw new WekaException(ex);
      }
    }
  }

  /**
   * Set the modification text to apply
   *
   * @param text the text to apply
   */
  @OptionMetadata(displayName = "Text to use",
    description = "The text to modify the relation name with", displayOrder = 0)
  public
  void setModificationText(String text) {
    m_relationNameModText = text;
  }

  /**
   * Get the modification text to apply
   *
   * @return the modification text
   */
  public String getModificationText() {
    return m_relationNameModText;
  }

  /**
   * Set the modification type to apply
   *
   * @param mod the modification type to apply
   */
  @OptionMetadata(displayName = "Relation name modification type",
    description = "The type of modification to apply", displayOrder = 1)
  public void setModType(ModType mod) {
    m_modType = mod;
  }

  /**
   * Get the modification type to apply
   *
   * @return the modification type to apply
   */
  public ModType getModType() {
    return m_modType;
  }

  /**
   * Set the match string for regex modifications
   *
   * @param match the regular expression to apply for matching
   */
  @OptionMetadata(
    displayName = "Regular expression",
    description = "Regular expression to match when performing a REGEX modification",
    displayOrder = 2)
  public
  void setRegexMatch(String match) {
    m_regexMatch = match;
  }

  /**
   * Get the match string for regex modifications
   *
   * @return the regular expression to apply for matching
   */
  public String getRegexMatch() {
    return m_regexMatch;
  }

  /**
   * Set whether to replace all regular expression matches, or just the first.
   *
   * @param replaceAll true to replace all regex matches
   */
  @OptionMetadata(displayName = "Replace all regex matches",
    description = "Replace all matching occurrences if set to true, or just "
      + "the first match if set to false", displayOrder = 3)
  public void setReplaceAll(boolean replaceAll) {
    m_replaceAll = replaceAll;
  }

  /**
   * Get whether to replace all regular expression matches, or just the first.
   *
   * @return true to replace all regex matches
   */
  public boolean getReplaceAll() {
    return m_replaceAll;
  }

  /**
   * Process incoming data - alters the relation name (once per source
   * step/connection) and then passes the data through unchanged otherwise.
   *
   * @param data the payload to process
   * @throws WekaException if a problem occurs
   */
  @Override
  public void processIncoming(Data data) throws WekaException {
    getStepManager().processing();
    // null-safe: modification text may never have been set
    if (m_relationNameModText != null && m_relationNameModText.length() > 0) {
      String toCheckKey = data.getSourceStep().getName();
      String connName = data.getConnectionName();
      if (!data.getConnectionName().equals(StepManager.CON_INSTANCE)) {
        connName +=
          "_" + data.getPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, 1)
            + "_"
            + data.getPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM, 1);
      }
      toCheckKey += connName;

      if (!m_hasAltered.contains(toCheckKey)) {
        getStepManager().logBasic(
          "Altering relation name for data from step " + "'"
            + data.getSourceStep().getName() + "' (" + connName + ")");

        // Do the relation name mod. For instance connections all instances
        // share one header, so altering the header's dataset is sufficient
        Instances insts = null;
        if (data.getConnectionName().equals(StepManager.CON_INSTANCE)) {
          insts = ((Instance) data.getPrimaryPayload()).dataset();
        } else {
          insts = data.getPrimaryPayload();
        }
        applyRelationNameChange(insts);

        // BUG FIX: record the full composite key. Previously only the source
        // step name was added, which never matched the contains() check above
        // and caused the modification (and log message) to be re-applied for
        // every incoming payload
        m_hasAltered.add(toCheckKey);
      }
    }

    // pass data through
    getStepManager().outputData(data);
    getStepManager().finished();
  }

  /**
   * Apply the change to the relation name in the given Instances object
   *
   * @param insts the Instances object to operate on
   */
  protected void applyRelationNameChange(Instances insts) {
    switch (m_modType) {
    case REPLACE:
      insts.setRelationName(m_relationNameModText);
      break;
    case APPEND:
      insts.setRelationName(insts.relationName() + m_relationNameModText);
      break;
    case PREPEND:
      insts.setRelationName(m_relationNameModText + insts.relationName());
      break;
    case REGEX:
      // guard against an uncompiled pattern (e.g. empty match string) -
      // previously this would throw a NullPointerException
      if (m_regexPattern != null) {
        String rel = insts.relationName();
        if (m_replaceAll) {
          rel = m_regexPattern.matcher(rel).replaceAll(m_relationNameModText);
        } else {
          rel = m_regexPattern.matcher(rel).replaceFirst(m_relationNameModText);
        }
        insts.setRelationName(rel);
      }
      break;
    }
  }

  /**
   * Get the list of acceptable incoming connection types
   *
   * @return the list of acceptable incoming connection types
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    return Arrays.asList(StepManager.CON_INSTANCE, StepManager.CON_DATASET,
      StepManager.CON_TRAININGSET, StepManager.CON_TESTSET);
  }

  /**
   * Get the list of outgoing connection types that can be made given the
   * current state of incoming connections (we mirror whatever is connected
   * to us)
   *
   * @return a list of outgoing connection types that can be made
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    Map<String, List<StepManager>> incomingConnected =
      getStepManager().getIncomingConnections();
    return new ArrayList<String>(incomingConnected.keySet());
  }

  /**
   * Enum of modification types
   */
  protected static enum ModType {
    REPLACE, PREPEND, APPEND, REGEX;
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/Appender.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* Appender.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Utils;
import weka.core.WekaException;
import weka.core.converters.SerializedInstancesLoader;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.atomic.AtomicInteger;
/**
* A bean that appends multiple incoming data connections into a single data
* set. The incoming connections can be either all instance connections or all
* batch-oriented connections (i.e. data set, training set and test set).
* Instance and batch connections can't be mixed. An amalgamated output is
* created that is a combination of all the incoming attributes. Missing values
* are used to fill columns that don't exist in a particular incoming data set.
* If all incoming connections are instance connections, then the outgoing
* connection must be an instance connection (and vice versa for incoming batch
* connections).
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(name = "Appender", category = "Flow",
  toolTipText = "Append multiple sets of instances",
  iconPath = KFGUIConsts.BASE_ICON_PATH + "Appender.png")
public class Appender extends BaseStep {

  private static final long serialVersionUID = -3003135257112845998L;

  /**
   * Used to keep track of how many upstream steps have sent us complete data
   * sets (batch) or headers (incremental) so far.
   */
  protected Map<Step, Instances> m_completed;

  /** Handles on temp files used to store batches of instances in batch mode */
  protected Map<Step, File> m_tempBatchFiles;

  /** Used to hold the final header in the case of incremental operation */
  protected Instances m_completeHeader;

  /** Gets decremented for each incoming instance stream that has finished */
  protected AtomicInteger m_streamingCountDown;

  /**
   * Holds savers used for incrementally saving incoming instance streams. After
   * we've seen the structure from each incoming connection we can create the
   * final output structure, pull any saved instances from the temp files and
   * discard these savers as they will no longer be needed.
   */
  protected transient Map<Step, ObjectOutputStream> m_incrementalSavers;

  /** Holds the temp files in play for incremental incoming connections */
  protected transient Map<Step, File> m_incrementalFiles;

  /** Re-usable data object for streaming mode */
  protected Data m_streamingData;

  /** True if this step has been reset */
  protected boolean m_isReset;

  /**
   * Initialize the step
   *
   * @throws WekaException if a problem occurs
   */
  @Override
  public void stepInit() throws WekaException {
    m_isReset = true;
    m_completed = new HashMap<Step, Instances>();
    m_tempBatchFiles = new HashMap<Step, File>();
    m_completeHeader = null;
    m_incrementalSavers = new HashMap<Step, ObjectOutputStream>();
    m_incrementalFiles = new HashMap<Step, File>();
    m_streamingCountDown = new AtomicInteger(
      getStepManager().numIncomingConnectionsOfType(StepManager.CON_INSTANCE));
    m_streamingData = new Data(StepManager.CON_INSTANCE);
  }

  /**
   * Get the incoming connection types accepted by this step at this time.
   * Streaming (instance) and batch connections cannot be mixed.
   *
   * @return a list of incoming connection types
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    List<String> result = new ArrayList<String>();
    if (getStepManager().numIncomingConnections() == 0 || getStepManager()
      .numIncomingConnectionsOfType(StepManager.CON_INSTANCE) == 0) {
      result.addAll(Arrays.asList(StepManager.CON_DATASET,
        StepManager.CON_TRAININGSET, StepManager.CON_TESTSET));
    }
    if (getStepManager().numIncomingConnections() == 0 || getStepManager()
      .numIncomingConnectionsOfType(StepManager.CON_INSTANCE) > 0) {
      result.add(StepManager.CON_INSTANCE);
    }
    return result;
  }

  /**
   * Get a list of outgoing connection types that this step can produce at this
   * time - instance out for instance in, batch out for batch in.
   *
   * @return a list of outgoing connection types
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    List<String> result = new ArrayList<String>();
    if (getStepManager()
      .numIncomingConnectionsOfType(StepManager.CON_INSTANCE) > 0) {
      result.add(StepManager.CON_INSTANCE);
    } else {
      result.addAll(Arrays.asList(StepManager.CON_DATASET,
        StepManager.CON_TRAININGSET, StepManager.CON_TESTSET));
    }
    return result;
  }

  /**
   * Process an incoming data payload (if the step accepts incoming connections)
   *
   * @param data the data to process
   * @throws WekaException if a problem occurs
   */
  @Override
  public void processIncoming(Data data) throws WekaException {
    if (m_isReset
      && !data.getConnectionName().equals(StepManager.CON_INSTANCE)) {
      getStepManager().processing();
      m_isReset = false;
    }

    if (data.getConnectionName().equals(StepManager.CON_INSTANCE)) {
      processStreaming(data);
      if (m_streamingCountDown.get() == 0) {
        // all incoming streams have finished
        m_streamingData.clearPayload();
        getStepManager().throughputFinished(m_streamingData);
      }
    } else {
      processBatch(data);
      if (m_completed.size() == getStepManager().numIncomingConnections()) {
        // done
        getStepManager().finished();

        // save memory
        m_completed.clear();
        m_tempBatchFiles.clear();
      }
    }

    if (isStopRequested()) {
      getStepManager().interrupted();

      // save memory
      m_completed.clear();
      m_tempBatchFiles.clear();
      m_incrementalSavers.clear();
      m_incrementalFiles.clear();
    }
  }

  /**
   * Process batch data. Each incoming batch is serialized to a temp file;
   * once a batch has been seen from every incoming connection, all buffered
   * batches are read back, converted to the amalgamated structure and output.
   *
   * @param data the data to process
   * @throws WekaException if a problem occurs
   */
  protected synchronized void processBatch(Data data) throws WekaException {
    Integer setNum =
      data.getPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, 1);
    Integer maxSetNum =
      data.getPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM, 1);

    Instances insts = data.getPrimaryPayload();
    if (setNum > 1 || maxSetNum > 1) {
      // can't accept more than one dataset/batch from a particular source
      throw new WekaException("Source " + data.getSourceStep().getName() + " "
        + "is generating more than one " + data.getConnectionName() + " "
        + "in a batch");
    }

    Instances header = new Instances(insts, 0);
    m_completed.put(data.getSourceStep(), header);

    // write these instances (serialized) to a temp file
    try {
      File tmpF =
        File.createTempFile("weka", SerializedInstancesLoader.FILE_EXTENSION);
      // FIX: ensure buffer files don't accumulate on disk if the JVM exits
      // before we get a chance to delete them below
      tmpF.deleteOnExit();
      ObjectOutputStream oos = new ObjectOutputStream(
        new BufferedOutputStream(new FileOutputStream(tmpF)));
      oos.writeObject(insts);
      oos.flush();
      oos.close();
      m_tempBatchFiles.put(data.getSourceStep(), tmpF);
    } catch (IOException e1) {
      throw new WekaException(e1);
    }

    if (isStopRequested()) {
      return;
    }

    // have we seen a dataset from every incoming connection?
    if (m_completed.size() == getStepManager().numIncomingConnections()) {
      // process all headers and create mongo header for new output.
      // missing values will fill columns that don't exist in particular data
      // sets
      Instances output = makeOutputHeader();
      getStepManager().logDetailed("Making output header structure");

      try {
        for (File f : m_tempBatchFiles.values()) {
          ObjectInputStream ois = new ObjectInputStream(
            new BufferedInputStream(new FileInputStream(f)));
          Instances temp = (Instances) ois.readObject();
          ois.close();
          // FIX: buffer file is no longer needed - clean it up now rather
          // than leaving it behind until JVM exit
          f.delete();
          // copy each instance over
          for (int i = 0; i < temp.numInstances(); i++) {
            Instance converted = makeOutputInstance(output, temp.instance(i));
            output.add(converted);
          }
        }
        Data outputD = new Data(data.getConnectionName(), output);
        outputD.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, 1);
        outputD.setPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM, 1);
        getStepManager().outputData(outputD);
      } catch (Exception ex) {
        throw new WekaException(ex);
      }
    }
  }

  /**
   * Process streaming data. Until the structure of every incoming stream has
   * been seen, arriving instances are buffered (serialized) in temp files;
   * once the amalgamated header can be built, buffered instances are drained,
   * converted and output, and subsequent instances are converted on the fly.
   *
   * @param data the data to process
   * @throws WekaException if a problem occurs
   */
  protected synchronized void processStreaming(Data data) throws WekaException {
    if (isStopRequested()) {
      return;
    }

    Step source = data.getSourceStep();
    Instance inst = data.getPrimaryPayload();
    if (!m_completed.containsKey(source)) {
      m_completed.put(source, inst.dataset());
    }

    if (m_completed.size() == getStepManager().numIncomingConnections()
      && m_completeHeader == null) {
      // create mondo header...
      getStepManager().logDetailed("Creating output header structure");
      m_completeHeader = makeOutputHeader();

      // now check for any buffered instances
      if (m_incrementalSavers.size() > 0) {
        // read in and convert these instances now
        for (Map.Entry<Step, ObjectOutputStream> e : m_incrementalSavers
          .entrySet()) {
          ObjectOutputStream s = e.getValue();
          // finish off the saving process first
          try {
            s.flush();
            s.close();
            File tmpFile = m_incrementalFiles.get(e.getKey());
            ObjectInputStream ois = new ObjectInputStream(
              new BufferedInputStream(new FileInputStream(tmpFile)));
            Instance tmpLoaded = null;
            do {
              try {
                tmpLoaded = (Instance) ois.readObject();
                Instance converted =
                  makeOutputInstance(m_completeHeader, tmpLoaded);
                m_streamingData.setPayloadElement(StepManager.CON_INSTANCE,
                  converted);
                getStepManager().outputData(m_streamingData);
              } catch (Exception ex) {
                // EOF reached (or corrupt entry) - stop draining this buffer
                ois.close();
                break;
              }
            } while (tmpLoaded != null);
            // FIX: buffer file drained - remove it from disk
            tmpFile.delete();
          } catch (Exception ex) {
            throw new WekaException(ex);
          }
        }
        m_incrementalSavers.clear();
        m_incrementalFiles.clear();
      }
    }

    if (isStopRequested()) {
      return;
    }

    if (getStepManager().isStreamFinished(data)) {
      m_streamingCountDown.decrementAndGet();
      return;
    }

    if (m_completeHeader == null) {
      // can't produce the final structure yet - buffer this instance
      ObjectOutputStream saver = m_incrementalSavers.get(data.getSourceStep());
      if (saver == null) {
        try {
          File tmpFile = File.createTempFile("weka", ".arff");
          // FIX: make sure the buffer file is cleaned up on JVM exit
          tmpFile.deleteOnExit();
          saver = new ObjectOutputStream(
            new BufferedOutputStream(new FileOutputStream(tmpFile)));
          m_incrementalSavers.put(data.getSourceStep(), saver);
          m_incrementalFiles.put(data.getSourceStep(), tmpFile);
        } catch (IOException ex) {
          throw new WekaException(ex);
        }
      }
      try {
        saver.writeObject(inst);
      } catch (IOException e1) {
        throw new WekaException(e1);
      }
    } else {
      Instance newI = makeOutputInstance(m_completeHeader, inst);
      m_streamingData.setPayloadElement(StepManager.CON_INSTANCE, newI);
      getStepManager().outputData(m_streamingData);
    }
  }

  /**
   * Makes an output instance by mapping the source instance's attribute
   * values into the amalgamated output structure. Attributes not present in
   * the source are left as missing values.
   *
   * @param output the structure of the output
   * @param source the source instance
   * @return an output instance
   */
  private Instance makeOutputInstance(Instances output, Instance source) {
    double[] newVals = new double[output.numAttributes()];
    for (int i = 0; i < newVals.length; i++) {
      newVals[i] = Utils.missingValue();
    }

    for (int i = 0; i < source.numAttributes(); i++) {
      if (!source.isMissing(i)) {
        Attribute s = source.attribute(i);
        // attributes are matched by name in the output header
        int outputIndex = output.attribute(s.name()).index();
        if (s.isNumeric()) {
          newVals[outputIndex] = source.value(s);
        } else if (s.isString()) {
          String sVal = source.stringValue(s);
          newVals[outputIndex] =
            output.attribute(outputIndex).addStringValue(sVal);
        } else if (s.isRelationValued()) {
          Instances rVal = source.relationalValue(s);
          newVals[outputIndex] =
            output.attribute(outputIndex).addRelation(rVal);
        } else if (s.isNominal()) {
          String nomVal = source.stringValue(s);
          newVals[outputIndex] =
            output.attribute(outputIndex).indexOfValue(nomVal);
        }
      }
    }
    Instance newInst = new DenseInstance(source.weight(), newVals);
    newInst.setDataset(output);

    return newInst;
  }

  /**
   * Create the structure of the output
   *
   * @return the structure of the output as a header-only set of instances
   * @throws WekaException if a problem occurs
   */
  protected Instances makeOutputHeader() throws WekaException {
    return makeOutputHeader(m_completed.values());
  }

  /**
   * Create the structure of the output given a collection of input structures.
   * The output is the union of all incoming attributes (matched by name);
   * nominal attributes with the same name have their value sets merged.
   *
   * @param headers a collection of incoming instance structures
   * @return the structure of the output as a header-only set of instances
   * @throws WekaException if a name is shared by attributes of different types
   */
  protected Instances makeOutputHeader(Collection<Instances> headers)
    throws WekaException {
    // process each header in turn...
    Map<String, Attribute> attLookup = new HashMap<String, Attribute>();
    List<Attribute> attList = new ArrayList<Attribute>();
    Map<String, Set<String>> nominalLookups =
      new HashMap<String, Set<String>>();
    for (Instances h : headers) {
      for (int i = 0; i < h.numAttributes(); i++) {
        Attribute a = h.attribute(i);
        if (!attLookup.containsKey(a.name())) {
          attLookup.put(a.name(), a);
          attList.add(a);
          if (a.isNominal()) {
            TreeSet<String> nVals = new TreeSet<String>();
            for (int j = 0; j < a.numValues(); j++) {
              nVals.add(a.value(j));
            }
            nominalLookups.put(a.name(), nVals);
          }
        } else {
          Attribute storedVersion = attLookup.get(a.name());
          // mismatched types between headers - can't continue
          if (storedVersion.type() != a.type()) {
            throw new WekaException("Conflicting types for attribute "
              + "name '" + a.name() + "' between incoming " + "instance sets");
          }
          if (storedVersion.isNominal()) {
            Set<String> storedVals = nominalLookups.get(a.name());
            for (int j = 0; j < a.numValues(); j++) {
              storedVals.add(a.value(j));
            }
          }
        }
      }
    }

    ArrayList<Attribute> finalAttList = new ArrayList<Attribute>();
    for (Attribute a : attList) {
      Attribute newAtt = null;
      if (a.isDate()) {
        newAtt = new Attribute(a.name(), a.getDateFormat());
      } else if (a.isNumeric()) {
        newAtt = new Attribute(a.name());
      } else if (a.isRelationValued()) {
        newAtt = new Attribute(a.name(), a.relation());
      } else if (a.isNominal()) {
        Set<String> vals = nominalLookups.get(a.name());
        List<String> newVals = new ArrayList<String>();
        for (String v : vals) {
          newVals.add(v);
        }
        newAtt = new Attribute(a.name(), newVals);
      } else if (a.isString()) {
        newAtt = new Attribute(a.name(), (List<String>) null);
      }
      finalAttList.add(newAtt);
    }

    return new Instances(
      "Appended_" + getStepManager().numIncomingConnections() + "_sets",
      finalAttList, 0);
  }

  /**
   * If possible, get the output structure for the named connection type as a
   * header-only set of instances. Can return null if the specified connection
   * type is not representable as Instances or cannot be determined at present.
   *
   * @param connectionName the name of the connection to get the output structure for
   * @return the output structure or null if it can't be produced
   * @throws WekaException if a problem occurs
   */
  @Override
  public Instances outputStructureForConnectionType(String connectionName)
    throws WekaException {
    if (getStepManager().numIncomingConnections() > 0) {
      List<Instances> incomingHeaders = new ArrayList<Instances>();
      for (Map.Entry<String, List<StepManager>> e : getStepManager()
        .getIncomingConnections().entrySet()) {
        if (e.getValue().size() > 0) {
          String incomingConType = e.getKey();
          for (StepManager sm : e.getValue()) {
            Instances incomingStruc = getStepManager()
              .getIncomingStructureFromStep(sm, incomingConType);
            if (incomingStruc == null) {
              // can't determine final output structure if any incoming
              // structures are null at present
              return null;
            }
            incomingHeaders.add(incomingStruc);
          }
        }
      }
      if (incomingHeaders.size() > 0) {
        return makeOutputHeader(incomingHeaders);
      }
    }

    return null;
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/Associator.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* Associator.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.associations.AssociationRules;
import weka.associations.AssociationRulesProducer;
import weka.core.Attribute;
import weka.core.Drawable;
import weka.core.Instances;
import weka.core.WekaException;
import weka.gui.ProgrammaticProperty;
import weka.gui.knowledgeflow.StepVisual;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* Step that wraps a Weka associator. Handles dataSet, trainingSet and testSet
* incoming connections. All connections are treated the same - i.e. are used as
* training data.
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(name = "Associator", category = "Associations",
toolTipText = "Weka associator wrapper", iconPath = "")
public class Associator extends WekaAlgorithmWrapper {
private static final long serialVersionUID = -589410455393151511L;
/** Template for the associator in use */
protected weka.associations.Associator m_associatorTemplate;
/**
* Get the class of the algorithm being wrapped
*
* @return the class of the wrapped algorithm
*/
@Override
public Class getWrappedAlgorithmClass() {
  // all algorithms wrapped by this step type are association learners
  final Class wrappedClass = weka.associations.Associator.class;
  return wrappedClass;
}
/**
* Set the wrapped algorithm
*
* @param algo the wrapped algorithm
*/
@Override
public void setWrappedAlgorithm(Object algo) {
  super.setWrappedAlgorithm(algo);
  // default icon shown when the specific associator has no icon of its own
  final String defaultIcon = StepVisual.BASE_ICON_PATH + "DefaultAssociator.gif";
  m_defaultIconPath = defaultIcon;
}
/**
* Set the associator to use. Is a convenience method - just calls
* setWrappedAlgorithm()
*
* @param associator the associator to use
*/
@ProgrammaticProperty
public void setAssociator(weka.associations.Associator associator) {
  // convenience alias - simply delegates to the generic wrapper setter
  setWrappedAlgorithm(associator);
}
/**
* Get the associator to use. Is a convenience method - just calls
* getWrappedAlgorithm()
*
* @return the associator in use
*/
public weka.associations.Associator getAssociator() {
  // convenience alias - narrows the generic wrapped algorithm to Associator
  return (weka.associations.Associator) getWrappedAlgorithm();
}
/**
* Initializes the step
*
* @throws WekaException if a problem occurs
*/
@Override
public void stepInit() throws WekaException {
  // sanity check: the configured algorithm must actually be an associator
  Object wrapped = getWrappedAlgorithm();
  if (!(wrapped instanceof weka.associations.Associator)) {
    throw new WekaException("Wrapped algorithm is not an instance of "
      + "a weka.associations.Associator!");
  }

  // keep a pristine copy of the configured associator; a fresh copy is made
  // from this template for each batch that gets processed
  try {
    m_associatorTemplate =
      weka.associations.AbstractAssociator.makeCopy(getAssociator());
  } catch (Exception ex) {
    throw new WekaException(ex);
  }
}
/**
* Processes incoming data
*
* @param data the data to process
* @throws WekaException if a problem occurs
*/
@Override
public void processIncoming(Data data) throws WekaException {
  Instances trainingData = data.getPrimaryPayload();
  Integer setNum = data.getPayloadElement(StepManager.CON_AUX_DATA_SET_NUM);
  Integer maxSetNum =
    data.getPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM);

  // nothing to do if a stop has already been requested
  if (isStopRequested()) {
    return;
  }

  try {
    getStepManager().processing();
    // train a fresh copy of the template so the configured associator is
    // never mutated
    weka.associations.Associator model =
      weka.associations.AbstractAssociator.makeCopy(m_associatorTemplate);
    model.buildAssociations(trainingData);

    // push the model and its various representations downstream
    outputAssociatorData(model, setNum, maxSetNum);
    outputTextData(model, trainingData, setNum);
    outputGraphData(model, trainingData, setNum);

    if (isStopRequested()) {
      getStepManager().interrupted();
    } else {
      getStepManager().finished();
    }
  } catch (Exception ex) {
    throw new WekaException(ex);
  }
}
/**
* Outputs the trained associator to downstream steps that are interested
*
* @param associator the associator to output
* @param setNum the set number of the data used to train the associator
* @param maxSetNum the maximum set number
* @throws WekaException if a problem occurs
*/
protected void outputAssociatorData(weka.associations.Associator associator,
Integer setNum, Integer maxSetNum) throws WekaException {
if (getStepManager()
.numOutgoingConnectionsOfType(StepManager.CON_BATCH_ASSOCIATOR) == 0) {
return;
}
Data out = new Data(StepManager.CON_BATCH_ASSOCIATOR, associator);
if (setNum != null && maxSetNum != null) {
out.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, setNum);
out.setPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM, maxSetNum);
}
if (associator instanceof AssociationRulesProducer) {
AssociationRules rules =
((AssociationRulesProducer) associator).getAssociationRules();
out.setPayloadElement(StepManager.CON_AUX_DATA_BATCH_ASSOCIATION_RULES,
rules);
}
getStepManager().outputData(out);
}
/**
* Outputs textual representation of associator to downstream steps
*
* @param associator the associator to output the textual form for
* @param train the training data used to train the associator
* @param setNum the set number of the data
* @throws WekaException if a problem occurs
*/
protected void outputTextData(weka.associations.Associator associator,
Instances train, Integer setNum) throws WekaException {
if (getStepManager()
.numOutgoingConnectionsOfType(StepManager.CON_TEXT) == 0) {
return;
}
String modelString = associator.toString();
String titleString = associator.getClass().getName();
titleString = titleString.substring(titleString.lastIndexOf('.') + 1,
titleString.length());
modelString = "=== Associator model ===\n\n" + "Scheme: " + titleString
+ "\n" + "Relation: " + train.relationName() + "\n\n" + modelString;
titleString = "Model: " + titleString;
Data textData = new Data(StepManager.CON_TEXT, modelString);
textData.setPayloadElement(StepManager.CON_AUX_DATA_TEXT_TITLE,
titleString);
if (setNum != null) {
textData.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, setNum);
}
getStepManager().outputData(textData);
}
protected void outputGraphData(weka.associations.Associator associator,
Instances insts, Integer setNum) throws WekaException {
if (!(associator instanceof Drawable) || getStepManager()
.numOutgoingConnectionsOfType(StepManager.CON_GRAPH) == 0) {
return;
}
try {
String graphString = ((Drawable) associator).graph();
int graphType = ((Drawable) associator).graphType();
String grphTitle = associator.getClass().getCanonicalName();
grphTitle =
grphTitle.substring(grphTitle.lastIndexOf('.') + 1, grphTitle.length());
String set = setNum != null ? "Set " + setNum : "";
grphTitle = set + " (" + insts.relationName() + ") " + grphTitle;
Data graphData = new Data(StepManager.CON_GRAPH);
graphData.setPayloadElement(StepManager.CON_GRAPH, graphString);
graphData.setPayloadElement(StepManager.CON_AUX_DATA_GRAPH_TITLE,
grphTitle);
graphData.setPayloadElement(StepManager.CON_AUX_DATA_GRAPH_TYPE,
graphType);
getStepManager().outputData(graphData);
} catch (Exception ex) {
throw new WekaException(ex);
}
}
/**
* Get a list of incoming connection types that this step can accept at this
* time
*
* @return a list of incoming connections that this step can accept
*/
@Override
public List<String> getIncomingConnectionTypes() {
List<String> result = new ArrayList<String>();
if (getStepManager().numIncomingConnections() == 0) {
result.addAll(Arrays.asList(StepManager.CON_DATASET,
StepManager.CON_TRAININGSET, StepManager.CON_TESTSET));
}
return result;
}
/**
* Get a list of outgoing connections that this step can produce at this time
*
* @return a list of outgoing connection types
*/
@Override
public List<String> getOutgoingConnectionTypes() {
List<String> result = new ArrayList<String>();
if (getStepManager().numIncomingConnections() > 0) {
result.add(StepManager.CON_BATCH_ASSOCIATOR);
result.add(StepManager.CON_TEXT);
}
result.add(StepManager.CON_INFO);
return result;
}
/**
* If possible, get the output structure for the named connection type as a
* header-only set of instances. Can return null if the specified connection
* type is not representable as Instances or cannot be determined at present.
*
* @param connectionName the connection type to generate output structure for
* @return the output structure this step generates, or null if it can't be
* determined at this point in time
* @throws WekaException if a problem occurs
*/
@Override
public Instances outputStructureForConnectionType(String connectionName)
throws WekaException {
if (connectionName.equals(StepManager.CON_TEXT)) {
ArrayList<Attribute> attInfo = new ArrayList<Attribute>();
attInfo.add(new Attribute("Title", (ArrayList<String>) null));
attInfo.add(new Attribute("Text", (ArrayList<String>) null));
return new Instances("TextEvent", attInfo, 0);
} else if (connectionName.equals(StepManager.CON_BATCH_ASSOCIATOR)) {
if (m_associatorTemplate instanceof AssociationRulesProducer) {
// we make the assumption here that consumers of
// batchAssociationRules events will utilize a structure
// consisting of the RHS of the rule (String), LHS of the
// rule (String) and one numeric attribute for each metric
// associated with the rules.
String[] metricNames = ((AssociationRulesProducer) m_associatorTemplate)
.getRuleMetricNames();
ArrayList<Attribute> attInfo = new ArrayList<Attribute>();
attInfo.add(new Attribute("LHS", (ArrayList<String>) null));
attInfo.add(new Attribute("RHS", (ArrayList<String>) null));
attInfo.add(new Attribute("Support"));
for (String metricName : metricNames) {
attInfo.add(new Attribute(metricName));
}
return new Instances(StepManager.CON_AUX_DATA_BATCH_ASSOCIATION_RULES,
attInfo, 0);
}
}
return null;
}
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/AttributeSummarizer.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* AttributeSummarizer.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.OptionMetadata;
import weka.core.PluginManager;
import weka.gui.ProgrammaticProperty;
import weka.gui.beans.OffscreenChartRenderer;
import weka.gui.beans.WekaOffscreenChartRenderer;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
* Step that collects data to display in a summary overview of attribute
* distributions
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(name = "AttributeSummarizer", category = "Visualization",
toolTipText = "Visualize datasets in a matrix of histograms",
iconPath = KFGUIConsts.BASE_ICON_PATH + "AttributeSummarizer.gif")
public class AttributeSummarizer extends BaseSimpleDataVisualizer {
private static final long serialVersionUID = 2313372820072708102L;
/** The x-axis attribute name */
protected String m_xAxis = "";
/** The offscreen renderer to use */
protected transient OffscreenChartRenderer m_offscreenRenderer;
/** Name of the renderer to use for offscreen chart rendering */
protected String m_offscreenRendererName = "Weka Chart Renderer";
/**
* Additional options for the offscreen renderer
*/
protected String m_additionalOptions = "";
/** Width of offscreen plots */
protected String m_width = "500";
/** Height of offscreen plots */
protected String m_height = "400";
/**
* Set the name of the attribute for the x-axis in offscreen plots. This
* defaults to "False Positive Rate" for threshold curves if not specified.
*
* @param xAxis the name of the xAxis
*/
@OptionMetadata(displayName = "X-axis attribute",
description = "Attribute name " + "or /first, /last or /<index>",
displayOrder = 1)
public void setOffscreenXAxis(String xAxis) {
m_xAxis = xAxis;
}
/**
* Get the name of the attribute for the x-axis in offscreen plots
*
* @return the name of the xAxis
*/
public String getOffscreenXAxis() {
return m_xAxis;
}
/**
* Set the width (in pixels) of the offscreen image to generate.
*
* @param width the width in pixels.
*/
@OptionMetadata(displayName = "Chart width (pixels)",
description = "Width of the rendered chart", displayOrder = 2)
public void setOffscreenWidth(String width) {
m_width = width;
}
/**
* Get the width (in pixels) of the offscreen image to generate.
*
* @return the width in pixels.
*/
public String getOffscreenWidth() {
return m_width;
}
/**
* Set the height (in pixels) of the offscreen image to generate
*
* @param height the height in pixels
*/
@OptionMetadata(displayName = "Chart height (pixels)",
description = "Height of the rendered chart", displayOrder = 3)
public void setOffscreenHeight(String height) {
m_height = height;
}
/**
* Get the height (in pixels) of the offscreen image to generate
*
* @return the height in pixels
*/
public String getOffscreenHeight() {
return m_height;
}
/**
* Set the name of the renderer to use for offscreen chart rendering
* operations
*
* @param rendererName the name of the renderer to use
*/
@ProgrammaticProperty
public void setOffscreenRendererName(String rendererName) {
m_offscreenRendererName = rendererName;
m_offscreenRenderer = null;
}
/**
* Get the name of the renderer to use for offscreen chart rendering
* operations
*
* @return the name of the renderer to use
*/
public String getOffscreenRendererName() {
return m_offscreenRendererName;
}
/**
* Set the additional options for the offscreen renderer
*
* @param additional additional options
*/
@ProgrammaticProperty
public void setOffscreenAdditionalOpts(String additional) {
m_additionalOptions = additional;
}
/**
* Get the additional options for the offscreen renderer
*
* @return the additional options
*/
public String getOffscreenAdditionalOpts() {
return m_additionalOptions;
}
/**
* Process incoming data
*
* @param data the data to process
*/
@Override
public synchronized void processIncoming(Data data) {
super.processIncoming(data, false);
if (getStepManager().numOutgoingConnectionsOfType(StepManager.CON_IMAGE) > 0) {
setupOffscreenRenderer();
createOffscreenPlot(data);
}
getStepManager().finished();
}
/**
* Create an offscreen plot
*
* @param data the data to create the plot from
*/
protected void createOffscreenPlot(Data data) {
List<Instances> offscreenPlotData = new ArrayList<Instances>();
Instances predictedI = data.getPrimaryPayload();
boolean colorSpecified = false;
String additional = m_additionalOptions;
if (m_additionalOptions.length() > 0) {
additional = environmentSubstitute(additional);
}
if (!additional.contains("-color")
&& m_offscreenRendererName.contains("Weka Chart Renderer")) {
// for WekaOffscreenChartRenderer only
if (additional.length() > 0) {
additional += ",";
}
if (predictedI.classIndex() >= 0) {
additional += "-color=" + predictedI.classAttribute().name();
} else {
additional += "-color=/last";
}
} else {
colorSpecified = true;
}
if (predictedI.classIndex() >= 0 && predictedI.classAttribute().isNominal()
&& !colorSpecified) {
// set up multiple series - one for each class
Instances[] classes = new Instances[predictedI.numClasses()];
for (int i = 0; i < predictedI.numClasses(); i++) {
classes[i] = new Instances(predictedI, 0);
classes[i].setRelationName(predictedI.classAttribute().value(i));
}
for (int i = 0; i < predictedI.numInstances(); i++) {
Instance current = predictedI.instance(i);
classes[(int) current.classValue()].add((Instance) current.copy());
}
for (Instances classe : classes) {
offscreenPlotData.add(classe);
}
} else {
offscreenPlotData.add(new Instances(predictedI));
}
List<String> options = new ArrayList<String>();
String[] optionsParts = additional.split(",");
for (String p : optionsParts) {
options.add(p.trim());
}
// only need the x-axis (used to specify the attribute to plot)
String xAxis = m_xAxis;
xAxis = environmentSubstitute(xAxis);
String width = m_width;
String height = m_height;
int defWidth = 500;
int defHeight = 400;
width = environmentSubstitute(width);
height = environmentSubstitute(height);
defWidth = Integer.parseInt(width);
defHeight = Integer.parseInt(height);
try {
getStepManager().logDetailed("Creating image");
BufferedImage osi =
m_offscreenRenderer.renderHistogram(defWidth, defHeight,
offscreenPlotData, xAxis, options);
Data imageData = new Data(StepManager.CON_IMAGE, osi);
String relationName = predictedI.relationName();
if (relationName.length() > 10) {
relationName = relationName.substring(0, 10);
}
imageData.setPayloadElement(StepManager.CON_AUX_DATA_TEXT_TITLE,
relationName + ":" + m_xAxis);
getStepManager().outputData(imageData);
} catch (Exception e1) {
e1.printStackTrace();
}
}
/**
* Get a map of popup viewers that can be used with this step
*
* @return a map of popup viewers
*/
@Override
public Map<String, String> getInteractiveViewers() {
Map<String, String> views = new LinkedHashMap<String, String>();
if (m_data.size() > 0) {
views.put("Show plots",
"weka.gui.knowledgeflow.steps.AttributeSummarizerInteractiveView");
}
return views;
}
/**
* Configures the offscreen renderer to use
*/
protected void setupOffscreenRenderer() {
getStepManager().logDetailed(
"Initializing offscreen renderer: " + getOffscreenRendererName());
if (m_offscreenRenderer == null) {
if (m_offscreenRendererName == null
|| m_offscreenRendererName.length() == 0) {
m_offscreenRenderer = new WekaOffscreenChartRenderer();
return;
}
if (m_offscreenRendererName.equalsIgnoreCase("weka chart renderer")) {
m_offscreenRenderer = new WekaOffscreenChartRenderer();
} else {
try {
Object r =
PluginManager.getPluginInstance(
"weka.gui.beans.OffscreenChartRenderer", m_offscreenRendererName);
if (r != null && r instanceof weka.gui.beans.OffscreenChartRenderer) {
m_offscreenRenderer = (OffscreenChartRenderer) r;
} else {
// use built-in default
getStepManager().logWarning(
"Offscreen renderer '" + getOffscreenRendererName()
+ "' is not available, using default weka chart renderer "
+ "instead");
m_offscreenRenderer = new WekaOffscreenChartRenderer();
}
} catch (Exception ex) {
// use built-in default
getStepManager().logWarning(
"Offscreen renderer '" + getOffscreenRendererName()
+ "' is not available, using default weka chart renderer "
+ "instead");
m_offscreenRenderer = new WekaOffscreenChartRenderer();
}
}
}
}
/**
* Get a list of outgoing connections that this step can produce at this time
*
* @return a list of outgoing connection types
*/
@Override
public List<String> getOutgoingConnectionTypes() {
return getStepManager().numIncomingConnections() > 0 ? Arrays
.asList(StepManager.CON_IMAGE) : new ArrayList<String>();
}
/**
* Get the fully qualified class name of the custom editor for this step
*
* @return the class name of the custom editor for this step
*/
@Override
public String getCustomEditorForStep() {
return "weka.gui.knowledgeflow.steps.AttributeSummarizerStepEditorDialog";
}
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/BaseSimpleDataVisualizer.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* BaseSimpleDataVisualizer.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Instances;
import weka.core.WekaException;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
/**
* Abstract base class for simple data visualization steps that just collect
* data sets for visualization.
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
public abstract class BaseSimpleDataVisualizer extends BaseStep implements
DataCollector {
private static final long serialVersionUID = 4955068920302509451L;
/** The datasets seen so far */
protected List<Data> m_data = new ArrayList<Data>();
@Override
public void stepInit() throws WekaException {
// Nothing to do
}
/**
* Process incoming data
*
* @param data the data to process
* @throws WekaException if a problem occurs
*/
@Override
public synchronized void processIncoming(Data data) throws WekaException {
processIncoming(data, true);
}
/**
* Process incoming data. Subclasses can override as necessary
*
* @param data the data to process
* @param notifyFinished true to notify the Knowledge Flow environment that we
* have finished processing
*/
protected synchronized void
processIncoming(Data data, boolean notifyFinished) {
getStepManager().processing();
Instances toPlot = data.getPrimaryPayload();
String name = (new SimpleDateFormat("HH:mm:ss.SSS - ")).format(new Date());
String title = name + toPlot.relationName();
int setNum = data.getPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, 1);
int maxSetNum =
data.getPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM, 1);
title += " set " + setNum + " of " + maxSetNum;
getStepManager().logDetailed("Processing " + title);
data.setPayloadElement(StepManager.CON_AUX_DATA_TEXT_TITLE, title);
m_data.add(data);
if (notifyFinished) {
getStepManager().finished();
}
}
/**
* Get a list of incoming connection types that this step can accept at this
* time
*
* @return a list of incoming connection types
*/
@Override
public List<String> getIncomingConnectionTypes() {
return Arrays.asList(StepManager.CON_DATASET, StepManager.CON_TRAININGSET,
StepManager.CON_TESTSET);
}
/**
* Get a list of outgoing connection types that this step can produce at this
* time. Subclasses to override (if necessary). This default implementation
* returns null (i.e. does not produce any outgoing data).
*
* @return a list of outgoing connection types that this step can produce
*/
@Override
public List<String> getOutgoingConnectionTypes() {
return null;
}
/**
* Get the datasets seen so far
*
* @return a list of datasets
*/
public List<Data> getDatasets() {
return m_data;
}
@Override
public Object retrieveData() {
return getDatasets();
}
@SuppressWarnings("unchecked")
@Override
public void restoreData(Object data) throws WekaException {
if (!(data instanceof List)) {
throw new WekaException("Was expecting an instance of a List");
}
m_data = ((List<Data>) data);
}
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/BaseStep.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* BaseStep.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import java.io.Serializable;
import java.lang.annotation.Annotation;
import java.util.Map;
import weka.core.Defaults;
import weka.core.Instances;
import weka.core.WekaException;
import weka.gui.ProgrammaticProperty;
import weka.gui.knowledgeflow.StepInteractiveViewer;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.LoggingLevel;
import weka.knowledgeflow.StepManager;
import weka.knowledgeflow.StepManagerImpl;
/**
* Base class for implementations of Step to use. Provides handy functions that
* automatically setup the step's name and "about" info, provide access to the
* step's StepManager and for resolving environment variables.
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
public abstract class BaseStep implements Step, BaseStepExtender, Serializable {
private static final long serialVersionUID = -1595753549991953141L;
/** The name of this step component */
protected String m_stepName = "";
/** The step manager to use */
protected transient StepManager m_stepManager;
/** True if the step is resource (cpu/memory) intensive */
protected boolean m_stepIsResourceIntensive;
/**
* Constructor
*/
public BaseStep() {
String clazzName = this.getClass().getCanonicalName();
clazzName = clazzName.substring(clazzName.lastIndexOf(".") + 1);
this.setName(clazzName);
Annotation[] annotations = this.getClass().getAnnotations();
for (Annotation a : annotations) {
if (a instanceof KFStep) {
String name = ((KFStep) a).name();
if (name.length() > 0) {
this.setName(name);
break;
}
}
}
}
/**
* Attempt to get default "about" information for this step by grabbing the
* toolTip from the KFStep annotation.
*
* @return a default "about" info string if this step uses the KFStep
* annotation and null otherwise. Subclasses should override to
* provide more comprehensive about info
*/
public String globalInfo() {
Annotation[] annotations = this.getClass().getAnnotations();
for (Annotation a : annotations) {
if (a instanceof KFStep) {
return ((KFStep) a).toolTipText();
}
}
return null;
}
/**
* Get the step manager for this step
*
* @return the step manager for this step
*/
@NotPersistable
@Override
public StepManager getStepManager() {
return this.m_stepManager;
}
/**
* Set the step manager for this step
*
* @param manager the step manager to use
*/
@Override
public void setStepManager(final StepManager manager) {
this.m_stepManager = manager;
}
/**
* Set whether this step is resource intensive (cpu/memory) or not. This
* affects which executor service is used to execute the step's processing.
*
* @param isResourceIntensive true if this step is resource intensive.
*/
@ProgrammaticProperty
public void setStepIsResourceIntensive(final boolean isResourceIntensive) {
this.getStepManager().setStepIsResourceIntensive(isResourceIntensive);
}
/**
* Get whether this step is resource intensive (cpu/memory) or not.
*
* @return true if this step is resource intensive
*/
public boolean isResourceIntensive() {
return this.getStepManager().stepIsResourceIntensive();
}
/**
* Set whether this step must run single threaded. I.e. on an executor
* service which has only one worker thread, thus effectively preventing
* more than one copy of the step executing at any one time.
*
* @param mustRunSingleThreaded true if the step must run single threaded
*/
@ProgrammaticProperty
public void setStepMustRunSingleThreaded(final boolean mustRunSingleThreaded) {
this.getStepManager().setStepMustRunSingleThreaded(mustRunSingleThreaded);
}
/**
* Get whether this step must run single threaded. I.e. on an executor
* service which has only one worker thread, thus effectively preventing
* more than one copy of the step executing at any one time.
*
* @return true if the step must run single threaded
*/
public boolean stepMustRunSingleThreaded() {
return this.getStepManager().getStepMustRunSingleThreaded();
}
/**
* Get the name of this step
*
* @return the name of this step
*/
@Override
public String getName() {
return this.m_stepName;
}
/**
* Set the name of this step
*
* @param name the name for this step
*/
@ProgrammaticProperty
@Override
public void setName(final String name) {
this.m_stepName = name;
}
/**
* Start processing. Subclasses should override this method if they can act as
* a start point in a flow.
*
* @throws WekaException if a problem occurs
*/
@Override
public void start() throws WekaException {
// no-op. Subclass should override if it acts as a start point
}
/**
* Request that processing be stopped. Subclasses should call
* {@code isStopRequested()} periodically to see if they should stop
* processing.
*/
@Override
public void stop() {
if (!(this instanceof Note)) {
// don't want any logging or status updates for Notes :-)
this.getStepManager().statusMessage("INTERRUPTED");
this.getStepManager().log("Interrupted", LoggingLevel.LOW);
}
// m_stopRequested = true;
((StepManagerImpl) this.getStepManager()).setStopRequested(true);
// if this step is processing incrementally then this will ensure
// that the busy flag gets set to false. This means that clients
// processing incremental stuff *must* use the throughput update
// mechanism
this.getStepManager().throughputUpdateEnd();
}
/**
* If possible, get the output structure for the named connection type as a
* header-only set of instances. Can return null if the specified connection
* type is not representable as Instances or cannot be determined at present.
*
* @param connectionName the name of the connection type to get the output
* structure for
* @return the output structure as a header-only Instances object
* @throws WekaException if a problem occurs
*/
@Override
public Instances outputStructureForConnectionType(final String connectionName) throws WekaException {
// no-op default
return null;
}
/**
* Process an incoming data payload (if the step accepts incoming connections)
*
* @param data the payload to process
* @throws WekaException if a problem occurs
* @throws InterruptedException
*/
@Override
public void processIncoming(final Data data) throws WekaException, InterruptedException {
// no-op. Subclass should override if it accepts incoming data
}
/**
* Return the fully qualified name of a custom editor component (JComponent)
* to use for editing the properties of the step. This method can return null,
* in which case the system will dynamically generate an editor using the
* GenericObjectEditor
*
* @return the fully qualified name of a step editor component
*/
@Override
public String getCustomEditorForStep() {
return null;
}
/**
* When running in a graphical execution environment a step can make one or
* more popup Viewer components available. These might be used to display
* results, graphics etc. Returning null indicates that the step has no such
* additional graphical views. The map returned by this method should be keyed
* by action name (e.g. "View results"), and values should be fully qualified
* names of the corresponding StepInteractiveView implementation. Furthermore,
* the contents of this map can (and should) be dependent on whether a
* particular viewer should be made available - i.e. if execution hasn't
* occurred yet, or if a particular incoming connection type is not present,
* then it might not be possible to view certain results.
*
* Viewers can implement StepInteractiveView directly (in which case they need
* to extends JPanel), or extends the AbstractInteractiveViewer class. The
* later extends JPanel, uses a BorderLayout, provides a "Close" button and a
* method to add additional buttons.
*
* @return a map of viewer component names, or null if this step has no
* graphical views
*/
@Override
public Map<String, String> getInteractiveViewers() {
return null;
}
/**
* An alternative to getStepInteractiveViewers that returns a Map of
* instantiated StepInteractiveViewer objects. Generally,
* getInteractiveViewers() is the preferred mechanism to specify any
* interactive viewers, as it does not require Steps to import and instantiate
* GUI classes. However, in some cases it might be unavoidable (e.g. Groovy
* script compilation involves custom classloaders), in these cases this
* method can be used instead.
*
* @return a map of instantiated instances of StepInteractiveViewers
*/
@Override
public Map<String, StepInteractiveViewer> getInteractiveViewersImpls() {
return null;
}
/**
* Get default settings for the step (if any). Returning null indicates that
* the step has no user-editable defaults.
*
* @return the default settings
*/
@Override
public Defaults getDefaultSettings() {
return null;
}
/**
* Convenience method that calls {@code StepManager.isStopRequested()}
*
* @return true if the execution environment has requested processing to stop
*/
public boolean isStopRequested() {
return this.getStepManager().isStopRequested();
}
/**
* Substitute the values of environment variables in the given string
*
* @param source the source string to substitute in
* @return the source string with all known environment variables resolved
*/
public String environmentSubstitute(final String source) {
return this.getStepManager().environmentSubstitute(source);
}
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/BaseStepExtender.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* BaseStepExtender.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import java.util.List;
import weka.core.WekaException;
import weka.knowledgeflow.Data;
/**
 * A minimal set of methods, duplicated from the Step interface, that a simple
 * subclass of BaseStep would need to implement in order to function as a start
 * and/or main processing step in the Knowledge Flow.
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision: $
 * @see Step
 * @see BaseStep
 */
public interface BaseStepExtender {
  /**
   * Initialize the step. Called before flow execution begins; implementations
   * should (re)initialize any per-run state here.
   *
   * @throws WekaException if a problem occurs during initialization
   */
  void stepInit() throws WekaException;
  /**
   * Get a list of incoming connection types that this step can accept. Ideally
   * (and if appropriate), this should take into account the state of the step
   * and any existing incoming connections. E.g. a step might be able to accept
   * one (and only one) incoming batch data connection.
   *
   * @return a list of incoming connections that this step can accept given its
   *         current state
   */
  List<String> getIncomingConnectionTypes();
  /**
   * Get a list of outgoing connection types that this step can produce. Ideally
   * (and if appropriate), this should take into account the state of the step
   * and the incoming connections. E.g. depending on what incoming connection is
   * present, a step might be able to produce a trainingSet output, a testSet
   * output or neither, but not both.
   *
   * @return a list of outgoing connections that this step can produce
   */
  List<String> getOutgoingConnectionTypes();
  /**
   * Start executing (if this component is a start point). Either this method,
   * processIncoming(), or both must be implemented.
   *
   * @throws WekaException if a problem occurs
   */
  void start() throws WekaException;
  /**
   * Process an incoming data payload (if the step accepts incoming
   * connections). Either this method, start(), or both must be implemented.
   *
   * @param data the payload to process
   * @throws WekaException if a problem occurs
   * @throws InterruptedException if the executing thread is interrupted while
   *           processing
   */
  void processIncoming(Data data) throws WekaException, InterruptedException;
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/Block.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* Block.java
* Copyright (C) 2016 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.OptionMetadata;
import weka.core.WekaException;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
/**
 * A step that waits for a specified step to finish processing before allowing
 * incoming data to proceed downstream. If no step to wait for is configured,
 * data is simply passed straight through.
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision: $
 */
@KFStep(name = "Block", category = "Flow",
  toolTipText = "Block until a specific step has finished processing",
  iconPath = KFGUIConsts.BASE_ICON_PATH + "DiamondPlain.gif")
public class Block extends BaseStep {

  private static final long serialVersionUID = 3204082191908877620L;

  /** The name of the step to wait for (may contain environment variables) */
  protected String m_stepToWaitFor = "";

  /**
   * The {@code StepManager} of the step to wait for; remains null when no
   * step name was supplied, in which case data passes straight through
   */
  protected transient StepManager m_smForStep;

  /**
   * Set the step to wait for
   *
   * @param stepToWaitFor the step to wait for
   */
  @OptionMetadata(displayName = "Wait until this step has completed",
    description = "This step will prevent data from passing downstream until "
      + "the specified step has finished processing")
  public void setStepToWaitFor(String stepToWaitFor) {
    m_stepToWaitFor = stepToWaitFor;
  }

  /**
   * Get the step to wait for
   *
   * @return the step to wait for
   */
  public String getStepToWaitFor() {
    return m_stepToWaitFor;
  }

  /**
   * Initialize the step. Resolves the configured step name (after environment
   * variable substitution) to its {@code StepManager}.
   *
   * @throws WekaException if the named step is this step itself (deadlock), or
   *           does not exist in the flow
   */
  @Override
  public void stepInit() throws WekaException {
    if (m_stepToWaitFor == null || m_stepToWaitFor.length() == 0) {
      getStepManager().logWarning(
        "No step to wait for specified - will not block");
      // FIX: return early rather than looking up an empty step name, which
      // previously made the null check below throw despite the warning.
      // m_smForStep stays null and processIncoming() just passes data through
      return;
    }
    m_smForStep =
      getStepManager().findStepInFlow(environmentSubstitute(m_stepToWaitFor));
    if (m_smForStep == getStepManager()) {
      // don't block on our self!!
      throw new WekaException("Blocking on oneself will cause deadlock!");
    }
    if (m_smForStep == null) {
      throw new WekaException("Step '" + environmentSubstitute(m_stepToWaitFor)
        + "' does not seem " + "to exist in the flow!");
    }
  }

  /**
   * Process incoming data. Polls (sleeping 300ms between checks) until the
   * monitored step reports that it has finished, then releases the data
   * downstream. Responds to stop requests and thread interruption.
   *
   * @param data the data to process
   * @throws WekaException if a problem occurs
   */
  @Override
  public void processIncoming(Data data) throws WekaException {
    getStepManager().processing();
    if (m_smForStep != null) {
      getStepManager().logBasic(
        "Waiting for step '" + environmentSubstitute(m_stepToWaitFor) + "'");
      getStepManager().statusMessage(
        "Waiting for step '" + environmentSubstitute(m_stepToWaitFor) + "'");
      while (!m_smForStep.isStepFinished()) {
        if (isStopRequested()) {
          break;
        }
        try {
          Thread.sleep(300);
        } catch (InterruptedException e) {
          // restore the interrupt status so callers can observe it
          Thread.currentThread().interrupt();
          getStepManager().interrupted();
          return;
        }
      }
      getStepManager().logBasic("Releasing data");
      getStepManager().statusMessage("Releasing data");
    }
    if (isStopRequested()) {
      getStepManager().interrupted();
    } else {
      // FIX: output the data exactly once - previously, when no step to wait
      // for was configured, the payload was forwarded twice (once in the
      // null-manager branch and once here)
      getStepManager().outputData(data);
      getStepManager().finished();
    }
  }

  /**
   * Get a list of incoming connection types that this step can accept at this
   * time
   *
   * @return a list of incoming connection types
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    return Arrays.asList(StepManager.CON_DATASET, StepManager.CON_TRAININGSET,
      StepManager.CON_INSTANCE, StepManager.CON_TESTSET,
      StepManager.CON_BATCH_CLASSIFIER, StepManager.CON_BATCH_CLUSTERER,
      StepManager.CON_BATCH_ASSOCIATOR, StepManager.CON_TEXT);
  }

  /**
   * Get a list of outgoing connection types that this step can produce at this
   * time. Mirrors whatever incoming connection types are currently present.
   *
   * @return a list of outgoing connection types
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    Set<String> inConnTypes =
      getStepManager().getIncomingConnections().keySet();
    return new ArrayList<String>(inConnTypes);
  }

  /**
   * Get the fully qualified class name of the custom editor for this step
   *
   * @return the class name of the custom editor
   */
  @Override
  public String getCustomEditorForStep() {
    return "weka.gui.knowledgeflow.steps.BlockStepEditorDialog";
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/BoundaryPlotter.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* BoundaryPlotter.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.classifiers.AbstractClassifier;
import weka.classifiers.Classifier;
import weka.clusterers.AbstractClusterer;
import weka.clusterers.DensityBasedClusterer;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.OptionHandler;
import weka.core.OptionMetadata;
import weka.core.SerializedObject;
import weka.core.Utils;
import weka.core.WekaException;
import weka.gui.ProgrammaticProperty;
import weka.gui.boundaryvisualizer.DataGenerator;
import weka.gui.boundaryvisualizer.KDDataGenerator;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.ExecutionResult;
import weka.knowledgeflow.StepManager;
import weka.knowledgeflow.StepTask;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.Future;
/**
* A step that computes visualization data for class/cluster decision
* boundaries.
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(name = "BoundaryPlotter", category = "Visualization",
toolTipText = "Visualize class/cluster decision boundaries in a 2D plot",
iconPath = KFGUIConsts.BASE_ICON_PATH + "DefaultDataVisualizer.gif")
public class BoundaryPlotter extends BaseStep implements DataCollector {
/** default colours for classes/clusters (cycled through modulo its length) */
public static final Color[] DEFAULT_COLORS = { Color.red, Color.green,
  Color.blue, new Color(0, 255, 255), // cyan
  new Color(255, 0, 255), // pink
  new Color(255, 255, 0), // yellow
  new Color(255, 255, 255), // white
  new Color(0, 0, 0) };
private static final long serialVersionUID = 7864251468395026619L;
/** Holds colors to use (populated from DEFAULT_COLORS in the constructor) */
protected List<Color> m_Colors = new ArrayList<Color>();
/**
 * Number of rows of the visualization to compute in parallel. We don't want
 * to dominate the thread pool that is used for executing all steps and step
 * sub-tasks in the KF (this is currently fixed at 50 threads by FlowRunner).
 */
protected int m_maxRowsInParallel = 10;
/** Width of images to generate */
protected int m_imageWidth = 400;
/** Height of images to generate */
protected int m_imageHeight = 400;
/** X axis attribute name/index (default: the first attribute) */
protected String m_xAttName = "/first";
/** Y axis attribute name/index (default: the second attribute, 1-based) */
protected String m_yAttName = "2";
/** Superimpose the training data on the plot? */
protected boolean m_plotTrainingData = true;
// 0-based attribute indices resolved from the names above per dataset
protected int m_xAttribute;
protected int m_yAttribute;
// min, max and ranges of these attributes (computed in computeMinMaxAtts())
protected double m_minX;
protected double m_minY;
protected double m_maxX;
protected double m_maxY;
protected double m_rangeX;
protected double m_rangeY;
// pixel width and height in terms of attribute values
protected double m_pixHeight;
protected double m_pixWidth;
/** The currently rendering image */
protected transient BufferedImage m_osi;
/** The spec of the scheme being used to render the current image */
protected String m_currentDescription;
/** Completed images, keyed by scheme description */
protected transient Map<String, BufferedImage> m_completedImages;
/** Classifiers to use */
protected List<Classifier> m_classifierTemplates;
/** Clusterers to use */
protected List<DensityBasedClusterer> m_clustererTemplates;
/** Copies of trained classifier to use in parallel for prediction */
protected weka.classifiers.Classifier[] m_threadClassifiers;
/** Copies of trained clusterer to use in parallel for prediction */
protected weka.clusterers.Clusterer[] m_threadClusterers;
/** Data generator copies to use in parallel */
protected DataGenerator[] m_threadGenerators;
/** The data generator to use */
protected KDDataGenerator m_dataGenerator;
/** User-specified bandwidth (string; parsed to an int in stepInit()) */
protected String m_kBand = "3";
/** User-specified num samples (string; parsed to an int in stepInit()) */
protected String m_nSamples = "2";
/** User-specified base for sampling (string; parsed to an int in stepInit()) */
protected String m_sBase = "2";
/** Parsed bandwidth */
protected int m_kernelBandwidth = 3;
/** Parsed samples */
protected int m_numSamplesPerRegion = 2;
/** Parsed base */
protected int m_samplesBase = 2;
/** Listener to notify of rendering progress (set by the interactive view) */
protected transient RenderingUpdateListener m_plotListener;
/** True if we've been reset */
protected boolean m_isReset;
/**
 * Constructor - seeds the working palette with copies of the default
 * colours (copies, so that later palette edits cannot mutate the shared
 * {@code DEFAULT_COLORS} entries).
 */
public BoundaryPlotter() {
  for (Color defaultColor : DEFAULT_COLORS) {
    Color copy =
      new Color(defaultColor.getRed(), defaultColor.getGreen(),
        defaultColor.getBlue());
    m_Colors.add(copy);
  }
}
/**
 * Set the name/index of the X axis attribute. Accepts an attribute name, a
 * 1-based index, or the special values "first"/"last" (optionally prefixed
 * with '/'); environment variables are substituted when the value is resolved
 * at execution time.
 *
 * @param xAttName name/index of the X axis attribute
 */
// make programmatic as our dialog will handle these directly, rather than
// deferring to the GOE
@ProgrammaticProperty
@OptionMetadata(displayName = "X attribute",
  description = "Attribute to visualize on the x-axis", displayOrder = 1)
public void setXAttName(String xAttName) {
  m_xAttName = xAttName;
}
/**
 * Get the name/index of the X axis attribute (default: "/first")
 *
 * @return the name/index of the X axis attribute
 */
public String getXAttName() {
  return m_xAttName;
}
/**
 * Set the name/index of the Y axis attribute. Accepts an attribute name, a
 * 1-based index, or the special values "first"/"last" (optionally prefixed
 * with '/'); environment variables are substituted when the value is resolved
 * at execution time.
 *
 * @param attName name/index of the Y axis attribute
 */
// make programmatic as our dialog will handle these directly, rather than
// deferring to the GOE
@ProgrammaticProperty
@OptionMetadata(displayName = "Y attribute",
  description = "Attribute to visualize on the y-axis", displayOrder = 2)
public void setYAttName(String attName) {
  m_yAttName = attName;
}
/**
 * Get the name/index of the Y axis attribute (default: "2", i.e. the second
 * attribute)
 *
 * @return the name/index of the Y axis attribute
 */
public String getYAttName() {
  return m_yAttName;
}
/**
 * Set the base for sampling. Stored as a string so that environment
 * variables may be used; parsed to an integer in stepInit() (default: "2").
 *
 * @param base the base to use
 */
@OptionMetadata(displayName = "Base for sampling (r)",
  description = "The base for sampling", displayOrder = 3)
public void setBaseForSampling(String base) {
  m_sBase = base;
}
/**
 * Get the base for sampling
 *
 * @return the base to use (string form, prior to parsing)
 */
public String getBaseForSampling() {
  return m_sBase;
}
/**
 * Set the number of locations/samples per pixel. Stored as a string so that
 * environment variables may be used; parsed to an integer in stepInit()
 * (default: "2").
 *
 * @param num the number of samples to use
 */
@OptionMetadata(displayName = "Num. locations per pixel",
  description = "Number of locations per pixel", displayOrder = 4)
public void setNumLocationsPerPixel(String num) {
  m_nSamples = num;
}
/**
 * Get the number of locations/samples per pixel
 *
 * @return the number of samples to use (string form, prior to parsing)
 */
public String getNumLocationsPerPixel() {
  return m_nSamples;
}
/**
 * Set the kernel bandwidth. Stored as a string so that environment variables
 * may be used; parsed to an integer in stepInit() (default: "3").
 *
 * NOTE(review): displayOrder 4 duplicates that of "Num. locations per pixel"
 * above - possibly unintentional; confirm desired dialog ordering.
 *
 * @param band the bandwidth
 */
@OptionMetadata(displayName = "Kernel bandwidth (k)",
  description = "Kernel bandwidth", displayOrder = 4)
public void setKernelBandwidth(String band) {
  m_kBand = band;
}
/**
 * Get the kernel bandwidth
 *
 * @return the bandwidth (string form, prior to parsing)
 */
public String getKernelBandwidth() {
  return m_kBand;
}
/**
 * Set the image width (in pixels). Non-positive values are silently ignored
 * and the current setting is retained (consistent with
 * {@link #setComputeMaxRowsInParallel(int)}); a non-positive width would
 * otherwise cause image creation to fail at render time.
 *
 * @param width the width to use
 */
@OptionMetadata(displayName = "Image width (pixels)",
  description = "Image width in pixels", displayOrder = 5)
public void setImageWidth(int width) {
  if (width > 0) {
    m_imageWidth = width;
  }
}
/**
 * Get the image width (in pixels). Default: 400.
 *
 * @return the width to use
 */
public int getImageWidth() {
  return m_imageWidth;
}
/**
 * Set the image height (in pixels). Non-positive values are silently ignored
 * and the current setting is retained (consistent with
 * {@link #setComputeMaxRowsInParallel(int)}); a non-positive height would
 * otherwise cause image creation to fail at render time.
 *
 * @param height the height to use
 */
@OptionMetadata(displayName = "Image height (pixels)",
  description = "Image height in pixels", displayOrder = 6)
public void setImageHeight(int height) {
  if (height > 0) {
    m_imageHeight = height;
  }
}
/**
 * Get the image height (in pixels). Default: 400.
 *
 * @return the height to use
 */
public int getImageHeight() {
  return m_imageHeight;
}
/**
 * Set the maximum number of row-computation tasks to run in parallel.
 * Non-positive values are silently ignored and the current setting is kept.
 *
 * @param max maximum number of rows to compute in parallel
 */
@OptionMetadata(displayName = "Max image rows to compute in parallel",
  description = "Use this many tasks for computing rows of the image",
  displayOrder = 7)
public void setComputeMaxRowsInParallel(int max) {
  if (max > 0) {
    m_maxRowsInParallel = max;
  }
}
/**
 * Get the maximum number of row-computation tasks to run in parallel.
 * Default: 10.
 *
 * @return the maximum number of rows to compute in parallel
 */
public int getComputeMaxRowsInParallel() {
  return m_maxRowsInParallel;
}
/**
 * Set whether to superimpose the training data points on the plot or not
 *
 * @param plot true to plot the training data
 */
@OptionMetadata(displayName = "Plot training points",
  description = "Superimpose the training data over the top of the plot",
  displayOrder = 8)
public void setPlotTrainingData(boolean plot) {
  m_plotTrainingData = plot;
}
/**
 * Get whether to superimpose the training data points on the plot or not.
 * Default: true.
 *
 * @return true if plotting the training data
 */
public boolean getPlotTrainingData() {
  return m_plotTrainingData;
}
/**
 * Initialize the step. Collects the classifier/clusterer templates supplied
 * via incoming "info" connections and parses the user-supplied sampling
 * settings (environment variables are substituted first; missing or
 * unparseable values fall back to the current defaults, with a warning
 * logged for unparseable ones).
 *
 * @throws WekaException if no info connections are present, or if a supplied
 *           clusterer is not a {@code DensityBasedClusterer}
 */
@Override
public void stepInit() throws WekaException {
  List<StepManager> infos =
    getStepManager().getIncomingConnectedStepsOfConnectionType(
      StepManager.CON_INFO);
  if (infos.size() == 0) {
    throw new WekaException(
      "One or more classifiers/clusterers need to be supplied via an 'info' "
        + "connection type");
  }
  m_classifierTemplates = new ArrayList<Classifier>();
  m_clustererTemplates = new ArrayList<DensityBasedClusterer>();
  for (StepManager m : infos) {
    Step info = m.getInfoStep();
    if (info instanceof weka.knowledgeflow.steps.Classifier) {
      m_classifierTemplates.add(((weka.knowledgeflow.steps.Classifier) info)
        .getClassifier());
    } else if (info instanceof weka.knowledgeflow.steps.Clusterer) {
      weka.clusterers.Clusterer c =
        ((weka.knowledgeflow.steps.Clusterer) info).getClusterer();
      // only density-based clusterers can supply the membership
      // probabilities needed to render boundaries
      if (!(c instanceof DensityBasedClusterer)) {
        throw new WekaException("Clusterer "
          + c.getClass().getCanonicalName()
          + " is not a DensityBasedClusterer");
      }
      m_clustererTemplates.add((DensityBasedClusterer) c);
    }
  }
  m_completedImages = new LinkedHashMap<String, BufferedImage>();
  // the three settings share identical parse-with-fallback handling
  m_numSamplesPerRegion =
    parseIntSetting(m_nSamples, "num samples per region parameter",
      m_numSamplesPerRegion);
  m_samplesBase =
    parseIntSetting(m_sBase, "the base for sampling parameter",
      m_samplesBase);
  m_kernelBandwidth =
    parseIntSetting(m_kBand, "kernel bandwidth parameter",
      m_kernelBandwidth);
  m_isReset = true;
}

/**
 * Parse an integer user setting after environment variable substitution.
 * Logs a warning and returns the fallback when the value is unparseable;
 * returns the fallback silently when the value is null/empty.
 *
 * @param setting the raw (possibly null/empty) setting value
 * @param what description of the setting, used in the warning message
 * @param fallback value to return when the setting is absent or unparseable
 * @return the parsed value, or {@code fallback}
 * @throws WekaException if environment substitution fails
 */
private int parseIntSetting(String setting, String what, int fallback)
  throws WekaException {
  if (setting == null || setting.length() == 0) {
    return fallback;
  }
  String resolved = environmentSubstitute(setting);
  try {
    return Integer.parseInt(resolved);
  } catch (NumberFormatException ex) {
    getStepManager().logWarning(
      "Unable to parse '" + resolved + "' for " + what + ", using default: "
        + fallback);
    return fallback;
  }
}
/**
 * Compute the minimum, maximum and range of the two visualization attributes
 * over the training data, along with the width/height of one pixel in
 * attribute units. Instances with a missing value in either attribute are
 * skipped; empty or degenerate ranges are replaced with fallback extents.
 *
 * @param trainingData the data to scan
 */
protected void computeMinMaxAtts(Instances trainingData) {
  m_minX = Double.MAX_VALUE;
  m_minY = Double.MAX_VALUE;
  // NOTE(review): Double.MIN_VALUE is the smallest *positive* double, not
  // the most negative, so for all-negative data the running max is never
  // updated. In practice this appears to be masked: such data also keeps
  // allPointsLessThanOne true, and that branch below forces the max to 1.0.
  // Left as-is to avoid disturbing the interacting fallbacks - confirm
  m_maxX = Double.MIN_VALUE;
  m_maxY = Double.MIN_VALUE;
  boolean allPointsLessThanOne = true;
  if (trainingData.numInstances() == 0) {
    // no data at all - use a unit extent
    m_minX = m_minY = 0.0;
    m_maxX = m_maxY = 1.0;
  } else {
    for (int i = 0; i < trainingData.numInstances(); i++) {
      Instance inst = trainingData.instance(i);
      double x = inst.value(m_xAttribute);
      double y = inst.value(m_yAttribute);
      if (!Utils.isMissingValue(x) && !Utils.isMissingValue(y)) {
        if (x < m_minX) {
          m_minX = x;
        }
        if (x > m_maxX) {
          m_maxX = x;
        }
        if (y < m_minY) {
          m_minY = y;
        }
        if (y > m_maxY) {
          m_maxY = y;
        }
        if (x > 1.0 || y > 1.0) {
          allPointsLessThanOne = false;
        }
      }
    }
  }
  // constant-valued attribute: anchor the lower end of the range at 0
  if (m_minX == m_maxX) {
    m_minX = 0;
  }
  if (m_minY == m_maxY) {
    m_minY = 0;
  }
  // sentinels still in place means no usable (non-missing) values were seen
  if (m_minX == Double.MAX_VALUE) {
    m_minX = 0;
  }
  if (m_minY == Double.MAX_VALUE) {
    m_minY = 0;
  }
  if (m_maxX == Double.MIN_VALUE) {
    m_maxX = 1;
  }
  if (m_maxY == Double.MIN_VALUE) {
    m_maxY = 1;
  }
  if (allPointsLessThanOne) {
    // data that looks normalized is always plotted on a [min, 1] extent
    // m_minX = m_minY = 0.0;
    m_maxX = m_maxY = 1.0;
  }
  m_rangeX = (m_maxX - m_minX);
  m_rangeY = (m_maxY - m_minY);
  // attribute-space size of one pixel
  m_pixWidth = m_rangeX / m_imageWidth;
  m_pixHeight = m_rangeY / m_imageHeight;
}
/**
 * Resolve an attribute name/index specification to a 0-based attribute
 * index. Accepts "first"//"/first", "last"//"/last", an attribute name, or a
 * 1-based index; environment variables are substituted first.
 *
 * @param attName the attribute name/index specification
 * @param data the instances to resolve the attribute against
 * @return the 0-based index of the attribute
 * @throws WekaException if the specification cannot be resolved
 */
protected int getAttIndex(String attName, Instances data)
  throws WekaException {
  String resolved = environmentSubstitute(attName);
  int index = -1;
  if (resolved.equalsIgnoreCase("first") || resolved.equalsIgnoreCase("/first")) {
    index = 0;
  } else if (resolved.equalsIgnoreCase("last")
    || resolved.equalsIgnoreCase("/last")) {
    index = data.numAttributes() - 1;
  } else {
    Attribute att = data.attribute(resolved);
    if (att != null) {
      index = att.index();
    } else {
      // not a name - fall back to interpreting it as a 1-based index
      try {
        index = Integer.parseInt(resolved) - 1;
      } catch (NumberFormatException ignored) {
        // fall through to the error below
      }
    }
  }
  if (index == -1) {
    throw new WekaException("Unable to find attribute '" + resolved
      + "' in the data " + "or to parse it as an index");
  }
  return index;
}
/**
 * Build the kernel density data generator over the training data, weighting
 * on the two visualization attributes.
 *
 * @param trainingData the data to build the generator from
 * @throws WekaException if building the generator fails
 */
protected void initDataGenerator(Instances trainingData) throws WekaException {
  boolean[] attsToWeightOn;
  // build DataGenerator - only the x/y visualization dimensions are weighted
  attsToWeightOn = new boolean[trainingData.numAttributes()];
  attsToWeightOn[m_xAttribute] = true;
  attsToWeightOn[m_yAttribute] = true;
  m_dataGenerator = new KDDataGenerator();
  m_dataGenerator.setWeightingDimensions(attsToWeightOn);
  m_dataGenerator.setKernelBandwidth(m_kernelBandwidth);
  try {
    m_dataGenerator.buildGenerator(trainingData);
  } catch (Exception ex) {
    throw new WekaException(ex);
  }
}
/**
 * Process an incoming dataset: resolve the x/y attributes, compute plot
 * extents, build the data generator, then render one boundary image per
 * configured classifier and clusterer. Synchronized because rendering state
 * (current image, extents, generator) is held in instance fields.
 *
 * @param data payload carrying the training instances (plus optional
 *          set-number metadata)
 * @throws WekaException if attribute resolution, training or rendering fails
 */
@Override
public synchronized void processIncoming(Data data) throws WekaException {
  getStepManager().processing();
  Instances training = data.getPrimaryPayload();
  Integer setNum =
    data.getPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, 1);
  Integer maxSetNum =
    data.getPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM, 1);
  // resolve the visualization attributes against this particular dataset
  m_xAttribute = getAttIndex(m_xAttName, training);
  m_yAttribute = getAttIndex(m_yAttName, training);
  computeMinMaxAtts(training);
  initDataGenerator(training);
  for (Classifier c : m_classifierTemplates) {
    // honour stop requests between (potentially long) renders
    if (isStopRequested()) {
      getStepManager().interrupted();
      return;
    }
    // do classifiers
    doScheme(c, null, training, setNum, maxSetNum);
  }
  for (DensityBasedClusterer c : m_clustererTemplates) {
    if (isStopRequested()) {
      getStepManager().interrupted();
      return;
    }
    doScheme(null, c, training, setNum, maxSetNum);
  }
  if (isStopRequested()) {
    getStepManager().interrupted();
  } else {
    getStepManager().finished();
  }
}
/**
 * Train one scheme (classifier or density-based clusterer) and render its
 * decision boundaries into a fresh image, computing up to
 * {@code m_maxRowsInParallel} image rows concurrently. The completed image
 * is cached and also emitted on the outgoing "image" connection.
 *
 * @param classifier the classifier to use (null when a clusterer is given)
 * @param clust the clusterer to use (null when a classifier is given)
 * @param trainingData the data to train the scheme on
 * @param setNum the set number of this data
 * @param maxSetNum the maximum set number
 * @throws WekaException if training or rendering fails
 */
protected void doScheme(Classifier classifier, DensityBasedClusterer clust,
  Instances trainingData, int setNum, int maxSetNum) throws WekaException {
  try {
    m_osi =
      new BufferedImage(m_imageWidth, m_imageHeight,
        BufferedImage.TYPE_INT_RGB);
    m_currentDescription =
      makeSchemeSpec(classifier != null ? classifier : clust, setNum,
        maxSetNum);
    // notify listeners
    getStepManager()
      .logBasic("Starting new plot for " + m_currentDescription);
    if (m_plotListener != null) {
      m_plotListener.newPlotStarted(m_currentDescription);
    }
    Graphics m = m_osi.getGraphics();
    m.fillRect(0, 0, m_imageWidth, m_imageHeight);
    // FIX: release the graphics context once the background is drawn
    m.dispose();
    Classifier toTrainClassifier = null;
    weka.clusterers.DensityBasedClusterer toTrainClusterer = null;
    if (classifier != null) {
      toTrainClassifier =
        (Classifier) AbstractClassifier.makeCopy(classifier);
      toTrainClassifier.buildClassifier(trainingData);
    } else {
      // clusterers must be built with no class attribute set
      int tempClassIndex = trainingData.classIndex();
      trainingData.setClassIndex(-1);
      toTrainClusterer =
        (DensityBasedClusterer) weka.clusterers.AbstractClusterer
          .makeCopy((weka.clusterers.Clusterer) clust);
      toTrainClusterer.buildClusterer(trainingData);
      trainingData.setClassIndex(tempClassIndex);
    }
    // populate the thread classifiers ready for parallel processing
    if (toTrainClassifier != null) {
      m_threadClassifiers =
        AbstractClassifier.makeCopies(toTrainClassifier, m_maxRowsInParallel);
    } else {
      m_threadClusterers =
        AbstractClusterer.makeCopies(toTrainClusterer, m_maxRowsInParallel);
    }
    m_threadGenerators = new DataGenerator[m_maxRowsInParallel];
    SerializedObject so = new SerializedObject(m_dataGenerator);
    for (int i = 0; i < m_maxRowsInParallel; i++) {
      m_threadGenerators[i] = (DataGenerator) so.getObject();
    }
    int taskCount = 0;
    List<Future<ExecutionResult<RowResult>>> results =
      new ArrayList<Future<ExecutionResult<RowResult>>>();
    for (int i = 0; i < m_imageHeight; i++) {
      if (taskCount < m_maxRowsInParallel) {
        getStepManager().logDetailed(
          "Launching task to compute image row " + i);
        SchemeRowTask t = new SchemeRowTask(this);
        t.setResourceIntensive(isResourceIntensive());
        t.m_classifier = null;
        t.m_clusterer = null;
        if (toTrainClassifier != null) {
          t.m_classifier = m_threadClassifiers[taskCount];
        } else {
          t.m_clusterer =
            (DensityBasedClusterer) m_threadClusterers[taskCount];
        }
        t.m_rowNum = i;
        t.m_xAtt = m_xAttribute;
        t.m_yAtt = m_yAttribute;
        t.m_imageWidth = m_imageWidth;
        t.m_imageHeight = m_imageHeight;
        t.m_pixWidth = m_pixWidth;
        t.m_pixHeight = m_pixHeight;
        t.m_dataGenerator = m_threadGenerators[taskCount];
        t.m_trainingData = trainingData;
        t.m_minX = m_minX;
        t.m_maxX = m_maxX;
        t.m_minY = m_minY;
        t.m_maxY = m_maxY;
        t.m_numOfSamplesPerRegion = m_numSamplesPerRegion;
        t.m_samplesBase = m_samplesBase;
        results.add(getStepManager().getExecutionEnvironment().submitTask(t));
        taskCount++;
      } else {
        // batch is full - drain the in-flight tasks before submitting more
        for (Future<ExecutionResult<RowResult>> r : results) {
          double[][] rowProbs = r.get().getResult().m_rowProbs;
          for (int j = 0; j < m_imageWidth; j++) {
            plotPoint(m_osi, j, r.get().getResult().m_rowNumber, rowProbs[j],
              j == m_imageWidth - 1);
          }
          getStepManager().statusMessage(
            "Completed row " + r.get().getResult().m_rowNumber);
          getStepManager().logDetailed(
            "Completed image row " + r.get().getResult().m_rowNumber);
        }
        results.clear();
        taskCount = 0;
        // FIX: always revisit row i - this iteration only drained the batch
        // and did not submit a task for row i. The original code skipped the
        // decrement when i was the last row, which meant that row was never
        // computed whenever the drain happened to fall on it (e.g. image
        // height 3 with 2 rows in parallel)
        i--;
        if (isStopRequested()) {
          return;
        }
      }
    }
    if (results.size() > 0) {
      // drain any tasks still in flight after the main loop
      for (Future<ExecutionResult<RowResult>> r : results) {
        double[][] rowProbs = r.get().getResult().m_rowProbs;
        for (int i = 0; i < m_imageWidth; i++) {
          plotPoint(m_osi, i, r.get().getResult().m_rowNumber, rowProbs[i],
            i == m_imageWidth - 1);
        }
        getStepManager().statusMessage(
          "Completed row " + r.get().getResult().m_rowNumber);
        getStepManager().logDetailed(
          "Completed image row " + r.get().getResult().m_rowNumber);
      }
      if (isStopRequested()) {
        return;
      }
    }
    if (m_plotTrainingData) {
      plotTrainingData(trainingData);
    }
    m_completedImages.put(m_currentDescription, m_osi);
    Data imageOut = new Data(StepManager.CON_IMAGE, m_osi);
    imageOut.setPayloadElement(StepManager.CON_AUX_DATA_TEXT_TITLE,
      m_currentDescription);
    getStepManager().outputData(imageOut);
  } catch (Exception ex) {
    throw new WekaException(ex);
  }
}
/**
 * Build a human-readable description of a scheme: its simple class name,
 * followed by its command-line options (when it is an OptionHandler) and,
 * for multi-set runs, the set number.
 *
 * @param scheme the scheme to describe
 * @param setNum the set number of the data the scheme is trained on
 * @param maxSetNum the maximum set number
 * @return the description string
 */
protected String makeSchemeSpec(Object scheme, int setNum, int maxSetNum) {
  String fullName = scheme.getClass().getCanonicalName();
  StringBuilder spec =
    new StringBuilder(fullName.substring(fullName.lastIndexOf('.') + 1));
  if (scheme instanceof OptionHandler) {
    spec.append(" ").append(
      Utils.joinOptions(((OptionHandler) scheme).getOptions()));
  }
  if (maxSetNum != 1) {
    spec.append(" (set ").append(setNum).append(" of ").append(maxSetNum)
      .append(")");
  }
  return spec.toString();
}
/**
 * Colour one pixel of the image according to the predicted class/cluster
 * distribution at that location: the palette colours are mixed, weighted by
 * their predicted probabilities.
 *
 * @param osi the image being rendered
 * @param x pixel column
 * @param y pixel row
 * @param probs predicted probability distribution for this pixel
 * @param update true when this is the last pixel of the row, in which case
 *          any registered rendering listener is notified
 */
protected void plotPoint(BufferedImage osi, int x, int y, double[] probs,
  boolean update) {
  Graphics osg = osi.getGraphics();
  osg.setPaintMode();
  float[] colVal = new float[3];
  float[] tempCols = new float[3];
  // probability-weighted mix of the palette colours
  for (int k = 0; k < probs.length; k++) {
    Color curr = m_Colors.get(k % m_Colors.size());
    curr.getRGBColorComponents(tempCols);
    for (int z = 0; z < 3; z++) {
      colVal[z] += probs[k] * tempCols[z];
    }
  }
  // clamp components to the legal [0, 1] range
  for (int z = 0; z < 3; z++) {
    if (colVal[z] < 0) {
      colVal[z] = 0;
    } else if (colVal[z] > 1) {
      colVal[z] = 1;
    }
  }
  osg.setColor(new Color(colVal[0], colVal[1], colVal[2]));
  osg.fillRect(x, y, 1, 1);
  // FIX: dispose the graphics context - one is created per pixel, so
  // failing to release it leaks native resources
  osg.dispose();
  if (update) {
    // end of row
    // generate an update event for interactive viewer to consume
    if (m_plotListener != null) {
      m_plotListener.currentPlotRowCompleted(y);
    }
  }
}
/**
 * Superimpose the training points over the rendered image. Each point with
 * non-missing x/y values is drawn as a small disc in its class colour with a
 * contrasting outline (black outline for white points, white otherwise).
 *
 * @param trainingData the training instances to plot
 */
public void plotTrainingData(Instances trainingData) {
  Graphics2D osg = (Graphics2D) m_osi.getGraphics();
  osg.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
    RenderingHints.VALUE_ANTIALIAS_ON);
  double xval = 0;
  double yval = 0;
  for (int i = 0; i < trainingData.numInstances(); i++) {
    if (!trainingData.instance(i).isMissing(m_xAttribute)
      && !trainingData.instance(i).isMissing(m_yAttribute)) {
      xval = trainingData.instance(i).value(m_xAttribute);
      yval = trainingData.instance(i).value(m_yAttribute);
      int panelX = convertToImageX(xval);
      int panelY = convertToImageY(yval);
      Color colorToPlotWith = Color.white;
      // FIX: classIndex() >= 0 - index 0 is a valid class attribute and -1
      // means "no class set". The original '> 0' test drew points white
      // whenever the class happened to be the first attribute
      if (trainingData.classIndex() >= 0) {
        colorToPlotWith =
          m_Colors.get((int) trainingData.instance(i).value(
            trainingData.classIndex())
            % m_Colors.size());
      }
      // contrasting outline so white points stay visible
      if (colorToPlotWith.equals(Color.white)) {
        osg.setColor(Color.black);
      } else {
        osg.setColor(Color.white);
      }
      osg.fillOval(panelX - 3, panelY - 3, 7, 7);
      osg.setColor(colorToPlotWith);
      osg.fillOval(panelX - 2, panelY - 2, 5, 5);
    }
  }
  // FIX: release the graphics context when done
  osg.dispose();
  if (m_plotListener != null) {
    m_plotListener.renderingImageUpdate();
  }
}
/**
 * Map an x attribute value to a pixel column (0 = left edge).
 *
 * @param xval the attribute value
 * @return the corresponding image column
 */
private int convertToImageX(double xval) {
  return (int) (((xval - m_minX) / m_rangeX) * m_imageWidth);
}
/**
 * Map a y attribute value to a pixel row, flipping vertically because image
 * y coordinates grow downwards.
 *
 * @param yval the attribute value
 * @return the corresponding image row
 */
private int convertToImageY(double yval) {
  double scaled = ((yval - m_minY) / m_rangeY) * m_imageHeight;
  return (int) (m_imageHeight - scaled);
}
/**
 * Get a list of incoming connection types that this step can accept:
 * dataset/trainingSet connections supply the data to visualize, while info
 * connections supply the classifier/clusterer templates.
 *
 * @return a list of incoming connections that this step can accept given its
 *         current state
 */
@Override
public List<String> getIncomingConnectionTypes() {
  return Arrays.asList(StepManager.CON_DATASET, StepManager.CON_TRAININGSET,
    StepManager.CON_INFO);
}
/**
 * Get a list of outgoing connection types that this step can produce: one
 * "image" connection carrying each rendered boundary plot.
 *
 * @return a list of outgoing connections that this step can produce
 */
@Override
public List<String> getOutgoingConnectionTypes() {
  return Arrays.asList(StepManager.CON_IMAGE);
}
/**
 * Get the completed images, keyed by scheme description
 *
 * @return a map of completed images
 */
public Map<String, BufferedImage> getImages() {
  return m_completedImages;
}
/**
 * Get the currently rendering image (may be partially drawn)
 *
 * @return the current image
 */
public BufferedImage getCurrentImage() {
  return m_osi;
}
/**
 * Set a listener to receive rendering updates. Only a single listener is
 * supported; setting a new one replaces any existing listener.
 *
 * @param l the {@code RenderingUpdateListener} to add
 */
public void setRenderingListener(RenderingUpdateListener l) {
  m_plotListener = l;
}
/**
 * Remove the rendering update listener. Only has an effect if the supplied
 * listener is the one currently registered.
 *
 * @param l the {@code RenderingUpdateListener} to remove
 */
public void removeRenderingListener(RenderingUpdateListener l) {
  if (l == m_plotListener) {
    m_plotListener = null;
  }
}
/**
 * Get the interactive viewers offered by this step in a graphical execution
 * environment. A single "Show plots" viewer is offered, but only while no
 * rendering listener is registered (i.e. while no viewer window is already
 * open and receiving updates).
 *
 * @return a map of viewer action name to fully qualified viewer class name,
 *         or an empty map when a viewer is already attached
 */
@Override
public Map<String, String> getInteractiveViewers() {
  Map<String, String> views = new LinkedHashMap<String, String>();
  if (m_plotListener == null) {
    views.put("Show plots",
      "weka.gui.knowledgeflow.steps.BoundaryPlotterInteractiveView");
  }
  return views;
}
/**
 * Return the fully qualified name of a custom editor component (JComponent)
 * to use for editing the properties of the step.
 *
 * @return the fully qualified name of a step editor component
 */
@Override
public String getCustomEditorForStep() {
  return "weka.gui.knowledgeflow.steps.BoundaryPlotterStepEditorDialog";
}
/**
 * Get the map of completed images in a serializable form (images are
 * converted to byte arrays, since BufferedImage itself is not serializable).
 *
 * @return the map of completed images as serializable bytes
 */
@Override
public Object retrieveData() {
  return ImageViewer.bufferedImageMapToSerializableByteMap(m_completedImages);
}
/**
 * Restore previously saved images - the inverse of {@link #retrieveData()}.
 *
 * @param data expected to be a {@code Map<String, byte[]>} of image name to
 *          serialized image bytes
 * @throws WekaException if decoding the image bytes fails
 * @throws IllegalArgumentException if the argument is not a Map
 */
@Override
@SuppressWarnings("unchecked")
public void restoreData(Object data) throws WekaException {
  if (!(data instanceof Map)) {
    throw new IllegalArgumentException("Argument must be a Map");
  }
  try {
    m_completedImages =
      ImageViewer
        .byteArrayImageMapToBufferedImageMap((Map<String, byte[]>) data);
  } catch (IOException ex) {
    throw new WekaException(ex);
  }
}
/**
 * Interface for something that wants to be informed of rendering progress
 * updates (e.g. the interactive viewer component)
 */
public interface RenderingUpdateListener {
  /**
   * Called when a new plot is started
   *
   * @param description the description/title of the plot
   */
  void newPlotStarted(String description);
  /**
   * Called when rendering of a row in the current plot has completed
   *
   * @param row the index of the row that was completed
   */
  void currentPlotRowCompleted(int row);
  /**
   * Called when a change (other than rendering a row) to the current plot has
   * occurred (e.g. the training points were superimposed).
   */
  void renderingImageUpdate();
}
  /**
   * Holds the computed probability distributions for one row of pixels of a
   * plot image. Produced by {@code SchemeRowTask}.
   */
  protected static class RowResult {

    /** Per-pixel class/cluster distributions for one row of the image */
    protected double[][] m_rowProbs;

    /** The row number (y pixel index) of this result */
    protected int m_rowNumber;
  }
/**
* A task for computing a row of an image using a trained model
*/
protected static class SchemeRowTask extends StepTask<RowResult> implements
Serializable {
private static final long serialVersionUID = -4144732293602550066L;
protected int m_xAtt;
protected int m_yAtt;
protected int m_rowNum;
protected int m_imageWidth;
protected int m_imageHeight;
protected double m_pixWidth;
protected double m_pixHeight;
protected weka.classifiers.Classifier m_classifier;
protected weka.clusterers.DensityBasedClusterer m_clusterer;
protected DataGenerator m_dataGenerator;
protected Instances m_trainingData;
protected double m_minX;
protected double m_maxX;
protected double m_minY;
protected double m_maxY;
protected int m_numOfSamplesPerRegion;
protected double m_samplesBase;
private Random m_random;
private int m_numOfSamplesPerGenerator;
private boolean[] m_attsToWeightOn;
private double[] m_weightingAttsValues;
private double[] m_vals;
private double[] m_dist;
Instance m_predInst;
public SchemeRowTask(Step source) {
super(source);
}
@Override
public void process() throws Exception {
RowResult result = new RowResult();
result.m_rowNumber = m_rowNum;
result.m_rowProbs = new double[m_imageWidth][0];
m_random = new Random(m_rowNum * 11);
m_dataGenerator.setSeed(m_rowNum * 11);
m_numOfSamplesPerGenerator =
(int) Math.pow(m_samplesBase, m_trainingData.numAttributes() - 3);
if (m_trainingData == null) {
throw new Exception("No training data set");
}
if (m_classifier == null && m_clusterer == null) {
throw new Exception("No scheme set");
}
if (m_dataGenerator == null) {
throw new Exception("No data generator set");
}
if (m_trainingData.attribute(m_xAtt).isNominal()
|| m_trainingData.attribute(m_yAtt).isNominal()) {
throw new Exception("Visualization dimensions must be numeric");
}
m_attsToWeightOn = new boolean[m_trainingData.numAttributes()];
m_attsToWeightOn[m_xAtt] = true;
m_attsToWeightOn[m_yAtt] = true;
// generate samples
m_weightingAttsValues = new double[m_attsToWeightOn.length];
m_vals = new double[m_trainingData.numAttributes()];
m_predInst = new DenseInstance(1.0, m_vals);
m_predInst.setDataset(m_trainingData);
getLogHandler().logDetailed("Computing row number: " + m_rowNum);
for (int j = 0; j < m_imageWidth; j++) {
double[] preds = calculateRegionProbs(j, m_rowNum);
result.m_rowProbs[j] = preds;
}
getExecutionResult().setResult(result);
}
private double[] calculateRegionProbs(int j, int i) throws Exception {
double[] sumOfProbsForRegion =
new double[m_classifier != null ? m_trainingData.classAttribute()
.numValues() : ((weka.clusterers.Clusterer) m_clusterer)
.numberOfClusters()];
double sumOfSums = 0;
for (int u = 0; u < m_numOfSamplesPerRegion; u++) {
double[] sumOfProbsForLocation =
new double[m_classifier != null ? m_trainingData.classAttribute()
.numValues() : ((weka.clusterers.Clusterer) m_clusterer)
.numberOfClusters()];
m_weightingAttsValues[m_xAtt] = getRandomX(j);
m_weightingAttsValues[m_yAtt] = getRandomY(m_imageHeight - i - 1);
m_dataGenerator.setWeightingValues(m_weightingAttsValues);
double[] weights = m_dataGenerator.getWeights();
double sumOfWeights = Utils.sum(weights);
sumOfSums += sumOfWeights;
int[] indices = Utils.sort(weights);
// Prune 1% of weight mass
int[] newIndices = new int[indices.length];
double sumSoFar = 0;
double criticalMass = 0.99 * sumOfWeights;
int index = weights.length - 1;
int counter = 0;
for (int z = weights.length - 1; z >= 0; z--) {
newIndices[index--] = indices[z];
sumSoFar += weights[indices[z]];
counter++;
if (sumSoFar > criticalMass) {
break;
}
}
indices = new int[counter];
System.arraycopy(newIndices, index + 1, indices, 0, counter);
for (int z = 0; z < m_numOfSamplesPerGenerator; z++) {
m_dataGenerator.setWeightingValues(m_weightingAttsValues);
double[][] values = m_dataGenerator.generateInstances(indices);
for (int q = 0; q < values.length; q++) {
if (values[q] != null) {
System.arraycopy(values[q], 0, m_vals, 0, m_vals.length);
m_vals[m_xAtt] = m_weightingAttsValues[m_xAtt];
m_vals[m_yAtt] = m_weightingAttsValues[m_yAtt];
// classify/cluster the instance
m_dist =
m_classifier != null ? m_classifier
.distributionForInstance(m_predInst) : m_clusterer
.distributionForInstance(m_predInst);
for (int k = 0; k < sumOfProbsForLocation.length; k++) {
sumOfProbsForLocation[k] += (m_dist[k] * weights[q]);
}
}
}
}
for (int k = 0; k < sumOfProbsForRegion.length; k++) {
sumOfProbsForRegion[k] +=
(sumOfProbsForLocation[k] / m_numOfSamplesPerGenerator);
}
}
if (sumOfSums > 0) {
// average
Utils.normalize(sumOfProbsForRegion, sumOfSums);
} else {
throw new Exception(
"Arithmetic underflow. Please increase value of kernel bandwidth " +
"parameter (k).");
}
// cache
double[] tempDist = new double[sumOfProbsForRegion.length];
System.arraycopy(sumOfProbsForRegion, 0, tempDist, 0,
sumOfProbsForRegion.length);
return tempDist;
}
/**
* Return a random x attribute value contained within the pix'th horizontal
* pixel
*
* @param pix the horizontal pixel number
* @return a value in attribute space
*/
private double getRandomX(int pix) {
double minPix = m_minX + (pix * m_pixWidth);
return minPix + m_random.nextDouble() * m_pixWidth;
}
/**
* Return a random y attribute value contained within the pix'th vertical
* pixel
*
* @param pix the vertical pixel number
* @return a value in attribute space
*/
private double getRandomY(int pix) {
double minPix = m_minY + (pix * m_pixHeight);
return minPix + m_random.nextDouble() * m_pixHeight;
}
}
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/ClassAssigner.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* ClassAssigner.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Attribute;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.WekaException;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * Knowledge Flow step for assigning a class attribute in incoming data. The
 * class column can be specified by name, by 1-based index, or by the special
 * values "first"/"last" (or "/first"/"/last"). Handles batch
 * (dataSet/trainingSet/testSet) and streaming (instance) connections.
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision: $
 */
@KFStep(name = "ClassAssigner", category = "Evaluation",
  toolTipText = "Designate which column is to be considered the class column "
    + "in incoming data.",
  iconPath = KFGUIConsts.BASE_ICON_PATH + "ClassAssigner.gif")
public class ClassAssigner extends BaseStep {

  private static final long serialVersionUID = -4269063233834866140L;

  /** Holds the user-specified class column/index (may contain env. variables) */
  protected String m_classColumnS = "/last";

  /** Holds the resolved (environment variables substituted) class column/index */
  protected String m_classCol = "/last";

  /** True if the class has already been assigned */
  protected boolean m_classAssigned;

  /** True if processing an instance stream */
  protected boolean m_isInstanceStream;

  /** Counter of payloads seen so far (used to latch m_classAssigned) */
  protected int m_streamCount;

  /**
   * Set the class column to use
   *
   * @param col the class column to use (name, 1-based index, "first" or
   *          "last")
   */
  public void setClassColumn(String col) {
    m_classColumnS = col;
  }

  /**
   * Get the class column to use
   *
   * @return the class column to use
   */
  public String getClassColumn() {
    return m_classColumnS;
  }

  /**
   * Initialize the step prior to execution. Resolves environment variables in
   * the configured class column and resets all streaming state.
   *
   * @throws WekaException if no class column has been specified
   */
  @Override
  public void stepInit() throws WekaException {
    if (m_classColumnS == null || m_classColumnS.length() == 0) {
      throw new WekaException("No class column specified!");
    }
    m_classCol = getStepManager().environmentSubstitute(m_classColumnS).trim();
    m_classAssigned = false;
    m_isInstanceStream = false;
    m_streamCount = 0;
  }

  /**
   * Process incoming data
   *
   * @param data the data to process
   * @throws WekaException if a problem occurs
   */
  @Override
  public void processIncoming(Data data) throws WekaException {
    Object payload = data.getPayloadElement(data.getConnectionName());
    if (!m_classAssigned) {
      if (data.getConnectionName().equals(StepManager.CON_INSTANCE)) {
        m_isInstanceStream = true;
        Instance inst = (Instance) payload;
        if (inst != null) {
          // streaming case: assign the class on the stream's (shared) header
          assignClass(inst.dataset());
        }
      } else {
        getStepManager().processing();
        if (payload == null) {
          throw new WekaException("Incoming data is null!");
        }
        // batch case: copy first so that upstream steps are not affected by
        // the class assignment
        payload = new Instances((Instances)payload);
        assignClass((Instances) payload);
        data = new Data(data.getConnectionName(), payload);
      }
      // NOTE(review): the class is only treated as permanently assigned after
      // two payloads have been processed - presumably to cope with an initial
      // structure-only element in a stream; confirm the intended semantics
      m_streamCount++;
      m_classAssigned = m_streamCount == 2;
    }
    if (isStopRequested()) {
      if (!m_isInstanceStream) {
        getStepManager().interrupted();
      }
      return;
    }
    if (m_isInstanceStream) {
      if (!getStepManager().isStreamFinished(data)) {
        getStepManager().throughputUpdateStart();
      } else {
        // propagate the end-of-stream marker downstream and stop
        getStepManager().throughputFinished(new Data(data.getConnectionName()));
        return;
      }
      getStepManager().throughputUpdateEnd();
    }
    getStepManager().outputData(data.getConnectionName(), data);
    if (!m_isInstanceStream || payload == null) {
      if (!isStopRequested()) {
        getStepManager().finished();
      } else {
        getStepManager().interrupted();
      }
    }
  }

  /**
   * Assign the class to a set of instances. Tries, in order: attribute name
   * lookup, the special values "first"/"last" ("/first"/"/last"), and finally
   * parsing the specification as a 1-based attribute index.
   *
   * @param dataSet the instances to assign the class to (modified in place)
   * @throws WekaException if the specification does not match any attribute
   */
  protected void assignClass(Instances dataSet) throws WekaException {
    Attribute classAtt = dataSet.attribute(m_classCol);
    boolean assigned = false;
    if (classAtt != null) {
      dataSet.setClass(classAtt);
      assigned = true;
    } else {
      if (m_classCol.equalsIgnoreCase("last")
        || m_classCol.equalsIgnoreCase("/last")) {
        dataSet.setClassIndex(dataSet.numAttributes() - 1);
        assigned = true;
      } else if (m_classCol.equalsIgnoreCase("first")
        || m_classCol.equalsIgnoreCase("/first")) {
        dataSet.setClassIndex(0);
        assigned = true;
      } else {
        // try parsing as an index
        try {
          int classIndex = Integer.parseInt(m_classCol);
          classIndex--;
          if (classIndex >= 0 && classIndex < dataSet.numAttributes()) {
            dataSet.setClassIndex(classIndex);
            assigned = true;
          }
        } catch (NumberFormatException ex) {
          // not a number either - fall through and report failure below
        }
      }
    }
    if (!assigned) {
      throw new WekaException(
        "Unable to assign '" + m_classCol + "' as the class.");
    }
    getStepManager()
      .logBasic("Assigned '" + dataSet.classAttribute().name() + "' as class.");
  }

  /**
   * Get the incoming connections that this step can accept at this time.
   * Only one incoming connection (of any supported type) is allowed.
   *
   * @return a list of incoming connection types that can be accepted
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    if (getStepManager().numIncomingConnections() == 0) {
      return Arrays.asList(StepManager.CON_DATASET, StepManager.CON_TRAININGSET,
        StepManager.CON_TESTSET, StepManager.CON_INSTANCE);
    }
    return new ArrayList<String>();
  }

  /**
   * Get the outgoing connection types that this step can produce at this
   * time. The outgoing type mirrors whatever incoming connection is present.
   *
   * @return a list of outgoing connections that can be produced
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    List<String> result = new ArrayList<String>();
    if (getStepManager()
      .numIncomingConnectionsOfType(StepManager.CON_INSTANCE) > 0) {
      result.add(StepManager.CON_INSTANCE);
    } else if (getStepManager()
      .numIncomingConnectionsOfType(StepManager.CON_DATASET) > 0) {
      result.add(StepManager.CON_DATASET);
    } else if (getStepManager()
      .numIncomingConnectionsOfType(StepManager.CON_TRAININGSET) > 0) {
      result.add(StepManager.CON_TRAININGSET);
    } else if (getStepManager()
      .numIncomingConnectionsOfType(StepManager.CON_TESTSET) > 0) {
      result.add(StepManager.CON_TESTSET);
    }
    return result;
  }

  /**
   * Return the structure of data output by this step for a given incoming
   * connection type. The output structure is the incoming structure with the
   * class attribute assigned.
   *
   * @param connectionName the incoming connection type
   * @return the structure (header-only instances) of the output, or null if
   *         it cannot be determined
   * @throws WekaException if a problem occurs
   */
  @Override
  public Instances outputStructureForConnectionType(String connectionName)
    throws WekaException {
    m_classCol = getStepManager().environmentSubstitute(m_classColumnS).trim();
    if (!(connectionName.equals(StepManager.CON_DATASET)
      || connectionName.equals(StepManager.CON_TRAININGSET)
      || connectionName.equals(StepManager.CON_TESTSET)
      || connectionName.equals(StepManager.CON_INSTANCE))
      || getStepManager().numIncomingConnections() == 0) {
      return null;
    }
    // our output structure is the same as whatever kind of input we are getting
    Instances strucForDatasetCon = getStepManager()
      .getIncomingStructureForConnectionType(StepManager.CON_DATASET);
    if (strucForDatasetCon != null) {
      assignClass(strucForDatasetCon);
      return strucForDatasetCon;
    }
    Instances strucForTestsetCon = getStepManager()
      .getIncomingStructureForConnectionType(StepManager.CON_TESTSET);
    if (strucForTestsetCon != null) {
      assignClass(strucForTestsetCon);
      return strucForTestsetCon;
    }
    Instances strucForTrainingCon = getStepManager()
      .getIncomingStructureForConnectionType(StepManager.CON_TRAININGSET);
    if (strucForTrainingCon != null) {
      assignClass(strucForTrainingCon);
      return strucForTrainingCon;
    }
    Instances strucForInstanceCon = getStepManager()
      .getIncomingStructureForConnectionType(StepManager.CON_INSTANCE);
    if (strucForInstanceCon != null) {
      assignClass(strucForInstanceCon);
      return strucForInstanceCon;
    }
    return null;
  }

  /**
   * Get the custom editor for this step
   *
   * @return the fully qualified class name of the custom editor for this
   *         step
   */
  @Override
  public String getCustomEditorForStep() {
    return "weka.gui.knowledgeflow.steps.ClassAssignerStepEditorDialog";
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/ClassValuePicker.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* ClassValuePicker.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Attribute;
import weka.core.Instances;
import weka.core.OptionMetadata;
import weka.core.WekaException;
import weka.filters.Filter;
import weka.filters.unsupervised.attribute.SwapValues;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * Step that allows the selection of the class label that is to be considered
 * as the "positive" class when computing threshold curves. The chosen label is
 * swapped into value position 0 of the class attribute (via SwapValues), since
 * downstream evaluation treats the first class value as the positive one.
 *
 * @author Mark Hall
 */
@KFStep(name = "ClassValuePicker", category = "Evaluation",
  toolTipText = "Designate which class value is considered the \"positive\" "
    + "class value (useful for ROC analysis)",
  iconPath = KFGUIConsts.BASE_ICON_PATH + "ClassValuePicker.gif")
public class ClassValuePicker extends BaseStep {

  /** For serialization */
  private static final long serialVersionUID = 8558445535347028472L;

  /**
   * User specified class label, label index or special identifier (e.g
   * "first"/"last")
   */
  protected String m_classValueS = "/first";

  /** Class label after environment variables have been resolved */
  protected String m_classValue = "/first";

  /** True if the class is set in the incoming data */
  protected boolean m_classIsSet;

  /** True if the class is set and is nominal */
  protected boolean m_classIsNominal;

  /**
   * Set the class value considered to be the "positive" class value.
   *
   * @param value the class value (label, 1-based index, "first" or "last")
   */
  @OptionMetadata(displayName = "Class value",
    description = "The class value to consider as the 'positive' class",
    displayOrder = 1)
  public void setClassValue(String value) {
    m_classValueS = value;
  }

  /**
   * Gets the class value considered to be the "positive" class value.
   *
   * @return the class value specification
   */
  public String getClassValue() {
    return m_classValueS;
  }

  /**
   * Initialize the step. Resolves environment variables in the class value
   * specification.
   *
   * @throws WekaException if no class label has been specified
   */
  @Override
  public void stepInit() throws WekaException {
    m_classIsSet = true;
    m_classIsNominal = true;
    m_classValue = getStepManager().environmentSubstitute(m_classValueS).trim();
    if (m_classValue.length() == 0) {
      throw new WekaException("No class label specified as the positive class!");
    }
  }

  /**
   * Process an incoming data payload (if the step accepts incoming connections)
   *
   * @param data the payload to process
   * @throws WekaException if a problem occurs
   */
  @Override
  public void processIncoming(Data data) throws WekaException {
    getStepManager().processing();
    Instances dataSet =
      (Instances) data.getPayloadElement(data.getConnectionName());
    if (dataSet == null) {
      throw new WekaException("Data should not be null!");
    }

    // Instances.classAttribute() throws UnassignedClassException (rather than
    // returning null) when no class is set, so the class index must be tested
    // instead for the "no class set" warning branch to be reachable
    if (dataSet.classIndex() < 0) {
      getStepManager().logWarning("No class attribute set in the data");
      m_classIsSet = false;
    }

    if (m_classIsSet && dataSet.classAttribute().isNumeric()) {
      getStepManager().logWarning("Class is numeric");
      m_classIsNominal = false;
    }
    Instances newDataSet = dataSet;
    if (m_classIsSet && m_classIsNominal) {
      newDataSet = assignClassValue(dataSet);
    }
    Data newData = new Data(data.getConnectionName());
    newData.setPayloadElement(data.getConnectionName(), newDataSet);
    getStepManager().outputData(newData);
    getStepManager().finished();
  }

  /**
   * Set the class value to be considered the 'positive' class. The label is
   * located by name, the special values "first"/"last" ("/first"/"/last") or
   * a 1-based index (optionally prefixed with "/"), then swapped into value
   * position 0 if it is not already there.
   *
   * @param dataSet the dataset to assign the class value for
   * @return the dataset with the chosen label as the first class value (a new
   *         dataset if a swap was required, otherwise the input unchanged)
   * @throws WekaException if the label is unknown/out of range or filtering
   *           fails
   */
  protected Instances assignClassValue(Instances dataSet) throws WekaException {
    Attribute classAtt = dataSet.classAttribute();
    int classValueIndex = classAtt.indexOfValue(m_classValue);
    if (classValueIndex == -1) {
      if (m_classValue.equalsIgnoreCase("last")
        || m_classValue.equalsIgnoreCase("/last")) {
        classValueIndex = classAtt.numValues() - 1;
      } else if (m_classValue.equalsIgnoreCase("first")
        || m_classValue.equalsIgnoreCase("/first")) {
        classValueIndex = 0;
      } else {
        // try to parse as a number (strip an optional leading "/")
        String clV = m_classValue;
        if (m_classValue.startsWith("/") && m_classValue.length() > 1) {
          clV = clV.substring(1);
        }
        try {
          classValueIndex = Integer.parseInt(clV);
          classValueIndex--; // zero-based
        } catch (NumberFormatException ex) {
          // not a number - the range check below reports the failure
        }
      }
    }
    if (classValueIndex < 0 || classValueIndex > classAtt.numValues() - 1) {
      throw new WekaException("Class label/index '" + m_classValue
        + "' is unknown or out of range!");
    }
    if (classValueIndex != 0) {
      try {
        SwapValues sv = new SwapValues();
        sv.setAttributeIndex("" + (dataSet.classIndex() + 1));
        sv.setFirstValueIndex("first");
        sv.setSecondValueIndex("" + (classValueIndex + 1));
        sv.setInputFormat(dataSet);
        Instances newDataSet = Filter.useFilter(dataSet, sv);
        // preserve the relation name (filtering appends to it)
        newDataSet.setRelationName(dataSet.relationName());
        getStepManager().logBasic(
          "New class value: " + newDataSet.classAttribute().value(0));
        return newDataSet;
      } catch (Exception ex) {
        throw new WekaException(ex);
      }
    }
    return dataSet;
  }

  /**
   * Get a list of incoming connection types that this step can accept. Only
   * one incoming batch connection is allowed.
   *
   * @return a list of incoming connections that this step can accept given its
   *         current state
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    if (getStepManager().numIncomingConnections() > 0) {
      return new ArrayList<String>();
    }
    return Arrays.asList(StepManager.CON_DATASET, StepManager.CON_TRAININGSET,
      StepManager.CON_TESTSET);
  }

  /**
   * Get a list of outgoing connection types that this step can produce. The
   * outgoing type mirrors whatever incoming connection is present.
   *
   * @return a list of outgoing connections that this step can produce
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    List<String> result = new ArrayList<String>();
    if (getStepManager().numIncomingConnectionsOfType(StepManager.CON_DATASET) > 0) {
      result.add(StepManager.CON_DATASET);
    } else if (getStepManager().numIncomingConnectionsOfType(
      StepManager.CON_TRAININGSET) > 0) {
      result.add(StepManager.CON_TRAININGSET);
    } else if (getStepManager().numIncomingConnectionsOfType(
      StepManager.CON_TESTSET) > 0) {
      result.add(StepManager.CON_TESTSET);
    }
    return result;
  }

  /**
   * If possible, get the output structure for the named connection type as a
   * header-only set of instances. Can return null if the specified connection
   * type is not representable as Instances or cannot be determined at present.
   * Note that swapping class values does not alter the header structure, so
   * the incoming structure is passed through unchanged.
   *
   * @param connectionName the name of the connection type to get the output
   *          structure for
   * @return the output structure as a header-only Instances object
   * @throws WekaException if a problem occurs
   */
  @Override
  public Instances outputStructureForConnectionType(String connectionName)
    throws WekaException {
    m_classValue = getStepManager().environmentSubstitute(m_classValueS).trim();
    if (!(connectionName.equals(StepManager.CON_DATASET)
      || connectionName.equals(StepManager.CON_TRAININGSET)
      || connectionName.equals(StepManager.CON_TESTSET) || connectionName
      .equals(StepManager.CON_INSTANCE))
      || getStepManager().numIncomingConnections() == 0) {
      return null;
    }
    // our output structure is the same as whatever kind of input we are getting
    Instances strucForDatasetCon =
      getStepManager().getIncomingStructureForConnectionType(
        StepManager.CON_DATASET);
    if (strucForDatasetCon != null) {
      return strucForDatasetCon;
    }
    Instances strucForTestsetCon =
      getStepManager().getIncomingStructureForConnectionType(
        StepManager.CON_TESTSET);
    if (strucForTestsetCon != null) {
      return strucForTestsetCon;
    }
    Instances strucForTrainingCon =
      getStepManager().getIncomingStructureForConnectionType(
        StepManager.CON_TRAININGSET);
    if (strucForTrainingCon != null) {
      return strucForTrainingCon;
    }
    Instances strucForInstanceCon =
      getStepManager().getIncomingStructureForConnectionType(
        StepManager.CON_INSTANCE);
    if (strucForInstanceCon != null) {
      return strucForInstanceCon;
    }
    return null;
  }

  /**
   * Return the fully qualified name of a custom editor component (JComponent)
   * to use for editing the properties of the step. This method can return null,
   * in which case the system will dynamically generate an editor using the
   * GenericObjectEditor
   *
   * @return the fully qualified name of a step editor component
   */
  @Override
  public String getCustomEditorForStep() {
    return "weka.gui.knowledgeflow.steps.ClassValuePickerStepEditorDialog";
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/Classifier.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* Classifier.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.classifiers.AbstractClassifier;
import weka.classifiers.UpdateableBatchProcessor;
import weka.classifiers.UpdateableClassifier;
import weka.classifiers.misc.InputMappedClassifier;
import weka.core.Drawable;
import weka.core.EnvironmentHandler;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.OptionHandler;
import weka.core.OptionMetadata;
import weka.core.SerializationHelper;
import weka.core.Utils;
import weka.core.WekaException;
import weka.gui.FilePropertyMetadata;
import weka.gui.ProgrammaticProperty;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.gui.knowledgeflow.StepVisual;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.LoggingLevel;
import weka.knowledgeflow.SingleThreadedExecution;
import weka.knowledgeflow.StepManager;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.ObjectInputStream;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.List;
/**
* Step that wraps a Weka classifier. Handles instance, trainingSet and testSet
* incoming connections. If the base classifier is Updateable, then it can be
 * optionally updated incrementally on an incoming connection of type instance.
* Otherwise, instance connections are used for testing a classifier
* incrementally. In the case of a single incoming testSet connection it is
* assumed that the classifier has already been trained.
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(name = "Classifier", category = "Classifiers",
toolTipText = "Weka classifier wrapper", iconPath = "",
resourceIntensive = true)
public class Classifier extends WekaAlgorithmWrapper implements
PairedDataHelper.PairedProcessor<weka.classifiers.Classifier> {
  /** For serialization */
  private static final long serialVersionUID = 8326706942962123155L;

  /**
   * The template for constructing concrete instances of the classifier to train
   */
  protected weka.classifiers.Classifier m_classifierTemplate;

  /**
   * Holds the trained classifier in the case of single train/test pairs or
   * instance stream connections
   */
  protected weka.classifiers.Classifier m_trainedClassifier;

  /** Header (structure) of the data the trained classifier was built with */
  protected Instances m_trainedClassifierHeader;

  /**
   * Optional file to load a pre-trained model to score with (batch), or to
   * score and update (incremental), in the case of testSet only (batch) or
   * instance (incremental) connections
   */
  protected File m_loadModelFileName = new File("");

  /**
   * True if we should reset an Updateable classifier at the start of
   * processing for an incoming "instance" stream
   */
  protected boolean m_resetIncrementalClassifier;

  /**
   * True if we should update an incremental classifier when there is a incoming
   * "instance" stream
   */
  protected boolean m_updateIncrementalClassifier = true;

  /** True if we are processing streaming data */
  protected boolean m_streaming;

  /** True if the classifier in use is Updateable */
  protected boolean m_classifierIsIncremental;

  /** Handles train test pair routing and synchronization for us */
  protected transient PairedDataHelper<weka.classifiers.Classifier> m_trainTestHelper;

  /** Reusable data object for incremental streaming classifiers */
  protected Data m_incrementalData = new Data(
    StepManager.CON_INCREMENTAL_CLASSIFIER);

  /** True if we've been reset (i.e. no data processed yet this run) */
  protected boolean m_isReset;
  /**
   * Get the class of the algorithm wrapped by this step (used to determine
   * which schemes may be configured for it).
   *
   * @return {@code weka.classifiers.Classifier}'s class object
   */
  @Override
  public Class getWrappedAlgorithmClass() {
    return weka.classifiers.Classifier.class;
  }
  /**
   * Set the wrapped classifier instance and configure the default icon to use
   * when no scheme-specific icon is available.
   *
   * @param algo the classifier to wrap
   */
  @Override
  public void setWrappedAlgorithm(Object algo) {
    super.setWrappedAlgorithm(algo);
    m_defaultIconPath = StepVisual.BASE_ICON_PATH + "DefaultClassifier.gif";
  }
/**
* Get the classifier to train
*
* @return the classifier to train
*/
public weka.classifiers.Classifier getClassifier() {
return (weka.classifiers.Classifier) getWrappedAlgorithm();
}
  /**
   * Set the classifier to train. Marked programmatic so that it is not
   * exposed as a user-editable property (delegates to setWrappedAlgorithm).
   *
   * @param classifier the classifier to train
   */
  @ProgrammaticProperty
  public void setClassifier(weka.classifiers.Classifier classifier) {
    setWrappedAlgorithm(classifier);
  }
  /**
   * Initialize the step: copy the configured classifier to use as a template,
   * create the train/test pair helper if training connections are present,
   * and optionally load a pre-trained model from disk.
   *
   * @throws WekaException if copying the classifier or loading the model fails
   */
  @Override
  public void stepInit() throws WekaException {
    try {
      m_trainedClassifier = null;
      m_trainedClassifierHeader = null;
      m_trainTestHelper = null;
      m_incrementalData = new Data(StepManager.CON_INCREMENTAL_CLASSIFIER);
      // copy so that repeated executions don't share a classifier instance
      m_classifierTemplate =
        AbstractClassifier
          .makeCopy((weka.classifiers.Classifier) getWrappedAlgorithm());
      if (m_classifierTemplate instanceof EnvironmentHandler) {
        ((EnvironmentHandler) m_classifierTemplate)
          .setEnvironment(getStepManager().getExecutionEnvironment()
            .getEnvironmentVariables());
      }
      // Check to see if the classifier is one that must run single-threaded
      Annotation a =
        m_classifierTemplate.getClass().getAnnotation(
          SingleThreadedExecution.class);
      if (a != null) {
        getStepManager().logBasic(
          getClassifier().getClass().getCanonicalName() + " "
            + "will be executed in the single threaded executor");
        getStepManager().setStepMustRunSingleThreaded(true);
      }
    } catch (Exception ex) {
      throw new WekaException(ex);
    }
    // create and initialize our train/test pair helper if necessary
    if (getStepManager().numIncomingConnectionsOfType(
      StepManager.CON_TRAININGSET) > 0) {
      m_trainTestHelper =
        new PairedDataHelper<weka.classifiers.Classifier>(
          this,
          this,
          StepManager.CON_TRAININGSET,
          getStepManager()
            .numIncomingConnectionsOfType(StepManager.CON_TESTSET) > 0 ? StepManager.CON_TESTSET
            : null);
    }
    m_isReset = true;
    m_classifierIsIncremental =
      m_classifierTemplate instanceof UpdateableClassifier;
    // only load a model from disk when there is no incoming trainingSet
    // connection (training would overwrite the loaded model anyway)
    if (getLoadClassifierFileName() != null
      && getLoadClassifierFileName().toString().length() > 0
      && getStepManager().numIncomingConnectionsOfType(
        StepManager.CON_TRAININGSET) == 0) {
      String resolvedFileName =
        getStepManager().environmentSubstitute(
          getLoadClassifierFileName().toString());
      try {
        getStepManager().logBasic("Loading classifier: " + resolvedFileName);
        loadModel(resolvedFileName);
      } catch (Exception ex) {
        throw new WekaException(ex);
      }
    }
    if (m_trainedClassifier != null
      && getStepManager()
        .numIncomingConnectionsOfType(StepManager.CON_INSTANCE) > 0
      && !m_classifierIsIncremental) {
      getStepManager().logWarning(
        "Loaded classifier is not an incremental one "
          + "- will only be able to evaluate, and not update, on the incoming "
          + "instance stream.");
    }
  }
  /**
   * Get the name of the classifier model file to load at execution time. This
   * only applies in the case where the only incoming connection is a test set
   * connection (batch mode) or an instance connection (incremental mode).
   *
   * @return the name of the file to load the model from
   */
  public File getLoadClassifierFileName() {
    return m_loadModelFileName;
  }
  /**
   * Set the name of the classifier model file to load at execution time. This
   * only applies in the case where the only incoming connection is a test set
   * connection (batch mode) or an instance connection (incremental mode).
   *
   * @param filename the name of the file to load the model from
   */
  @OptionMetadata(
    displayName = "Classifier model to load",
    description = "Optional "
      + "Path to a classifier to load at execution time (only applies when using "
      + "testSet or instance connections)")
  @FilePropertyMetadata(fileChooserDialogType = KFGUIConsts.OPEN_DIALOG,
    directoriesOnly = false)
  public void setLoadClassifierFileName(File filename) {
    m_loadModelFileName = filename;
  }
/**
* Get whether to reset an incremental classifier at the start of an incoming
* instance stream
*
* @return true if the classifier should be reset
*/
public boolean getResetIncrementalClassifier() {
return m_resetIncrementalClassifier;
}
/**
* Set whether to reset an incremental classifier at the start of an incoming
* instance stream
*
* @param reset true if the classifier should be reset
*/
@OptionMetadata(
displayName = "Reset incremental classifier",
description = "Reset classifier (if it is incremental) at the start of the incoming "
+ "instance stream")
public
void setResetIncrementalClassifier(boolean reset) {
m_resetIncrementalClassifier = reset;
}
/**
* Get whether to update an incremental classifier on an incoming instance
* stream
*
* @return true if an incremental classifier should be updated
*/
public boolean getUpdateIncrementalClassifier() {
return m_updateIncrementalClassifier;
}
/**
* Set whether to update an incremental classifier on an incoming instance
* stream
*
* @param update true if an incremental classifier should be updated
*/
@OptionMetadata(
displayName = "Update incremental classifier",
description = " Update an incremental classifier on incoming instance stream")
public
void setUpdateIncrementalClassifier(boolean update) {
m_updateIncrementalClassifier = update;
}
  /**
   * Process an incoming Data object. On the first call after a reset this
   * performs one-time initialisation based on the incoming connection type
   * (instance stream vs. training/test set), then dispatches to streaming,
   * paired train/test, or test-only handling.
   *
   * @param data the payload to process
   * @throws WekaException if a problem occurs
   */
  @Override
  public void processIncoming(Data data) throws WekaException {
    try {
      getStepManager().processing();
      if (m_isReset) {
        m_isReset = false;
        Instances incomingStructure = null;
        if (data.getConnectionName().equals(StepManager.CON_INSTANCE)) {
          // streaming - structure comes from the instance's dataset header
          incomingStructure =
            new Instances(
              ((Instance) data.getPayloadElement(StepManager.CON_INSTANCE))
                .dataset(),
              0);
        } else {
          incomingStructure =
            (Instances) data.getPayloadElement(data.getConnectionName());
        }
        if (incomingStructure.classAttribute() == null) {
          // no class set - fall back to the last attribute
          getStepManager()
            .logWarning(
              "No class index is set in the data - using last attribute as class");
          incomingStructure
            .setClassIndex(incomingStructure.numAttributes() - 1);
        }
        if (data.getConnectionName().equals(StepManager.CON_INSTANCE)) {
          m_streaming = true;
          if (m_trainedClassifier == null) {
            m_trainedClassifier =
              weka.classifiers.AbstractClassifier
                .makeCopy(m_classifierTemplate);
            getStepManager().logBasic("Initialising incremental classifier");
            m_trainedClassifier.buildClassifier(incomingStructure);
            m_trainedClassifierHeader = incomingStructure;
          } else if (m_resetIncrementalClassifier && m_classifierIsIncremental) {
            // make a copy here, just in case buildClassifier() implementations
            // do not re-initialize the classifier correctly
            m_trainedClassifier =
              weka.classifiers.AbstractClassifier
                .makeCopy(m_classifierTemplate);
            m_trainedClassifierHeader = incomingStructure;
            getStepManager().logBasic("Resetting incremental classifier");
            m_trainedClassifier.buildClassifier(m_trainedClassifierHeader);
          }
          getStepManager()
            .logBasic(
              m_updateIncrementalClassifier && m_classifierIsIncremental ? "Training incrementally"
                : "Predicting incrementally");
        } else if (data.getConnectionName().equals(StepManager.CON_TRAININGSET)) {
          m_trainedClassifierHeader = incomingStructure;
        } else if (data.getConnectionName().equals(StepManager.CON_TESTSET)
          && getStepManager().numIncomingConnectionsOfType(
            StepManager.CON_TRAININGSET) == 0
          && m_classifierTemplate instanceof InputMappedClassifier) {
          m_trainedClassifier =
            weka.classifiers.AbstractClassifier.makeCopy(m_classifierTemplate);
          // force the InputMappedClassifier to load a model (if one has been
          // configured)
          ((InputMappedClassifier) m_trainedClassifier).getModelHeader(null);
        }
        if (m_trainedClassifierHeader != null
          && !incomingStructure.equalHeaders(m_trainedClassifierHeader)) {
          // an InputMappedClassifier can map mismatched structures itself
          if (!(m_trainedClassifier instanceof InputMappedClassifier)) {
            throw new WekaException(
              "Structure of incoming data does not match "
                + "that of the trained classifier");
          }
        }
      }
      if (m_streaming) {
        processStreaming(data);
      } else if (m_trainTestHelper != null) {
        // train test pairs
        m_trainTestHelper.process(data);
      } else {
        // test only
        processOnlyTestSet(data);
      }
    } catch (Exception ex) {
      throw new WekaException(ex);
    }
  }
/**
* Process a training split (primary data handled by the PairedDataHelper)
*
* @param setNum the number of this split/fold
* @param maxSetNum the maximum number of splits/folds in the group
* @param data the actual split/fold data
* @param helper the PairedDataHelper managing the paired data
* @return a Classifier trained on this training split
* @throws WekaException if a problem occurs
*/
@Override
public weka.classifiers.Classifier processPrimary(Integer setNum,
Integer maxSetNum, Data data,
PairedDataHelper<weka.classifiers.Classifier> helper) throws WekaException {
Instances trainingData = data.getPrimaryPayload();
if (m_trainedClassifierHeader == null) {
m_trainedClassifierHeader = new Instances(trainingData, 0);
}
try {
weka.classifiers.Classifier classifier =
AbstractClassifier.makeCopy(m_classifierTemplate);
String classifierDesc = classifier.getClass().getCanonicalName();
classifierDesc =
classifierDesc.substring(classifierDesc.lastIndexOf(".") + 1);
if (classifier instanceof OptionHandler) {
String optsString =
Utils.joinOptions(((OptionHandler) classifier).getOptions());
classifierDesc += " " + optsString;
}
if (classifier instanceof EnvironmentHandler) {
((EnvironmentHandler) classifier).setEnvironment(getStepManager()
.getExecutionEnvironment().getEnvironmentVariables());
}
// retain the training data
helper
.addIndexedValueToNamedStore("trainingSplits", setNum, trainingData);
if (!isStopRequested()) {
getStepManager().logBasic(
"Building " + classifierDesc + " on " + trainingData.relationName()
+ " for fold/set " + setNum + " out of " + maxSetNum);
if (getStepManager().getLoggingLevel().ordinal() > LoggingLevel.LOW
.ordinal()) {
getStepManager().statusMessage(
"Building " + classifierDesc + " on fold/set " + setNum);
}
if (maxSetNum == 1) {
// single train/test split - makes sense to retain this trained
// classifier
m_trainedClassifier = classifier;
}
classifier.buildClassifier((Instances) trainingData);
getStepManager().logDetailed(
"Finished building " + classifierDesc + "on "
+ trainingData.relationName() + " for fold/set " + setNum
+ " out of " + maxSetNum);
outputTextData(classifier, setNum);
outputGraphData(classifier, setNum);
if (getStepManager().numIncomingConnectionsOfType(
StepManager.CON_TESTSET) == 0) {
// output a batch classifier for just the trained model
Data batchClassifier =
new Data(StepManager.CON_BATCH_CLASSIFIER, classifier);
batchClassifier.setPayloadElement(
StepManager.CON_AUX_DATA_TRAININGSET, trainingData);
batchClassifier.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM,
setNum);
batchClassifier.setPayloadElement(
StepManager.CON_AUX_DATA_MAX_SET_NUM, maxSetNum);
batchClassifier.setPayloadElement(StepManager.CON_AUX_DATA_LABEL,
getName());
getStepManager().outputData(batchClassifier);
}
}
return classifier;
} catch (Exception ex) {
throw new WekaException(ex);
}
}
  /**
   * Process a test split/fold (secondary data handled by PairedDataHelper).
   * Pairs the supplied test split with the trained classifier (and the
   * retained training split) for the same set number, then outputs a batch
   * classifier connection.
   *
   * @param setNum the set number of this split/fold
   * @param maxSetNum the maximum number of splits/folds in the group
   * @param data the actual split/fold data
   * @param helper the PairedDataHelper managing the paired data
   * @throws WekaException if a problem occurs
   */
  @Override
  public void processSecondary(Integer setNum, Integer maxSetNum, Data data,
    PairedDataHelper<weka.classifiers.Classifier> helper) throws WekaException {
    // trained classifier for this set number
    weka.classifiers.Classifier classifier =
      helper.getIndexedPrimaryResult(setNum);
    // test data
    Instances testSplit = data.getPrimaryPayload();
    if (m_trainedClassifierHeader != null
      && !testSplit.equalHeaders(m_trainedClassifierHeader)) {
      // an InputMappedClassifier can map mismatched structures itself
      if (!(m_trainedClassifier instanceof InputMappedClassifier)) {
        throw new WekaException(
          "Structure of incoming data does not match "
            + "that of the trained classifier");
      }
    }
    // paired training data
    Instances trainingSplit =
      helper.getIndexedValueFromNamedStore("trainingSplits", setNum);
    getStepManager().logBasic(
      "Dispatching model for set " + setNum + " out of " + maxSetNum
        + " to output");
    Data batchClassifier =
      new Data(StepManager.CON_BATCH_CLASSIFIER, classifier);
    batchClassifier.setPayloadElement(StepManager.CON_AUX_DATA_TRAININGSET,
      trainingSplit);
    batchClassifier.setPayloadElement(StepManager.CON_AUX_DATA_TESTSET,
      testSplit);
    batchClassifier.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, setNum);
    batchClassifier.setPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM,
      maxSetNum);
    batchClassifier
      .setPayloadElement(StepManager.CON_AUX_DATA_LABEL, getName());
    getStepManager().outputData(batchClassifier);
  }
  /**
   * Process a Data object in the case where we only have an incoming testSet
   * connection - i.e. a previously trained/loaded model is evaluated on the
   * incoming test data only.
   *
   * @param data the Data object to process
   * @throws WekaException if a problem occurs
   */
  protected void processOnlyTestSet(Data data) throws WekaException {
    // avoid any potential thread safety issues...
    try {
      // evaluate a copy of the trained model rather than the shared instance
      weka.classifiers.Classifier tempToTest =
        weka.classifiers.AbstractClassifier.makeCopy(m_trainedClassifier);
      Data batchClassifier = new Data(StepManager.CON_BATCH_CLASSIFIER);
      batchClassifier.setPayloadElement(StepManager.CON_BATCH_CLASSIFIER,
        tempToTest);
      batchClassifier.setPayloadElement(StepManager.CON_AUX_DATA_TESTSET,
        data.getPayloadElement(StepManager.CON_TESTSET));
      // default to set 1 of 1 if the payload carries no set numbers
      batchClassifier.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM,
        data.getPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, 1));
      batchClassifier.setPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM,
        data.getPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM, 1));
      batchClassifier.setPayloadElement(StepManager.CON_AUX_DATA_LABEL,
        getName());
      getStepManager().outputData(batchClassifier);
      if (isStopRequested()) {
        getStepManager().interrupted();
      } else {
        getStepManager().finished();
      }
    } catch (Exception ex) {
      throw new WekaException(ex);
    }
  }
  /**
   * Process a Data object in the case of an incoming instance (streaming)
   * connection. Each instance is first pushed downstream for evaluation, then
   * (optionally) used to update the classifier if it is incremental.
   *
   * @param data the Data object to process
   * @throws WekaException if a problem occurs
   */
  protected void processStreaming(Data data) throws WekaException {
    if (isStopRequested()) {
      return;
    }
    Instance inst = (Instance) data.getPayloadElement(StepManager.CON_INSTANCE);
    if (getStepManager().isStreamFinished(data)) {
      // finished
      if (m_trainedClassifier instanceof UpdateableBatchProcessor) {
        try {
          // let the classifier clean up after the batch
          ((UpdateableBatchProcessor) m_trainedClassifier).batchFinished();
        } catch (Exception ex) {
          throw new WekaException(ex);
        }
      }
      // notify any downstream steps consuming incremental classifier
      // data that the stream has finished
      m_incrementalData.setPayloadElement(
        StepManager.CON_INCREMENTAL_CLASSIFIER, m_trainedClassifier);
      m_incrementalData.setPayloadElement(
        StepManager.CON_AUX_DATA_TEST_INSTANCE, null);
      // getStepManager().outputData(m_incrementalData);
      outputTextData(m_trainedClassifier, -1);
      outputGraphData(m_trainedClassifier, 0);
      if (!isStopRequested()) {
        getStepManager().throughputFinished(m_incrementalData);
      }
      return;
    }
    // test on the instance
    m_incrementalData.setPayloadElement(StepManager.CON_AUX_DATA_TEST_INSTANCE,
      inst);
    m_incrementalData.setPayloadElement(StepManager.CON_INCREMENTAL_CLASSIFIER,
      m_trainedClassifier);
    getStepManager().outputData(m_incrementalData.getConnectionName(),
      m_incrementalData);
    // train on the instance?
    getStepManager().throughputUpdateStart();
    if (m_classifierIsIncremental && m_updateIncrementalClassifier) {
      // only train on instances that actually have a class value
      if (!inst.classIsMissing()) {
        try {
          ((UpdateableClassifier) m_trainedClassifier).updateClassifier(inst);
        } catch (Exception ex) {
          throw new WekaException(ex);
        }
      }
    }
    getStepManager().throughputUpdateEnd();
  }
  /**
   * Output a Data object containing a textual description of a model to any
   * outgoing text connections. Does nothing when there are no outgoing text
   * connections.
   *
   * @param classifier the classifier to get the textual description of
   * @param setNum the set number of the training data; -1 omits the set
   *          number from the output payload
   * @throws WekaException if a problem occurs
   */
  protected void outputTextData(weka.classifiers.Classifier classifier,
    int setNum) throws WekaException {
    if (getStepManager().numOutgoingConnectionsOfType(StepManager.CON_TEXT) == 0) {
      return;
    }
    Data textData = new Data(StepManager.CON_TEXT);
    String modelString = classifier.toString();
    String titleString = classifier.getClass().getName();
    // strip the package prefix for the title
    titleString =
      titleString.substring(titleString.lastIndexOf('.') + 1,
        titleString.length());
    modelString =
      "=== Classifier model ===\n\n" + "Scheme: " + titleString + "\n"
        + "Relation: " + m_trainedClassifierHeader.relationName() + "\n\n"
        + modelString;
    titleString = "Model: " + titleString;
    textData.setPayloadElement(StepManager.CON_TEXT, modelString);
    textData
      .setPayloadElement(StepManager.CON_AUX_DATA_TEXT_TITLE, titleString);
    if (setNum != -1) {
      textData.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, setNum);
    }
    getStepManager().outputData(textData);
  }
  /**
   * Output a Data object containing a dot graph, if the model is Drawable and
   * we have downstream steps receiving graph connections. Does nothing
   * otherwise.
   *
   * @param classifier the classifier to generate the graph from
   * @param setNum the set number of the data used to generate the graph
   * @throws WekaException if a problem occurs
   */
  protected void outputGraphData(weka.classifiers.Classifier classifier,
    int setNum) throws WekaException {
    if (classifier instanceof Drawable) {
      if (getStepManager().numOutgoingConnectionsOfType(StepManager.CON_GRAPH) == 0) {
        return;
      }
      try {
        String graphString = ((Drawable) classifier).graph();
        int graphType = ((Drawable) classifier).graphType();
        // title has the form: "Set <n> (<relation>) <SimpleClassName>"
        String grphTitle = classifier.getClass().getCanonicalName();
        grphTitle =
          grphTitle.substring(grphTitle.lastIndexOf('.') + 1,
            grphTitle.length());
        grphTitle =
          "Set " + setNum + " (" + m_trainedClassifierHeader.relationName()
            + ") " + grphTitle;
        Data graphData = new Data(StepManager.CON_GRAPH);
        graphData.setPayloadElement(StepManager.CON_GRAPH, graphString);
        graphData.setPayloadElement(StepManager.CON_AUX_DATA_GRAPH_TITLE,
          grphTitle);
        graphData.setPayloadElement(StepManager.CON_AUX_DATA_GRAPH_TYPE,
          graphType);
        getStepManager().outputData(graphData);
      } catch (Exception ex) {
        throw new WekaException(ex);
      }
    }
  }
@Override
public List<String> getIncomingConnectionTypes() {
List<String> result = new ArrayList<String>();
int numTraining =
getStepManager()
.numIncomingConnectionsOfType(StepManager.CON_TRAININGSET);
int numTesting =
getStepManager().numIncomingConnectionsOfType(StepManager.CON_TESTSET);
int numInstance =
getStepManager().numIncomingConnectionsOfType(StepManager.CON_INSTANCE);
if (numTraining == 0 && numTesting == 0) {
result.add(StepManager.CON_INSTANCE);
}
if (numInstance == 0 && numTraining == 0) {
result.add(StepManager.CON_TRAININGSET);
}
if (numInstance == 0 && numTesting == 0) {
result.add(StepManager.CON_TESTSET);
}
if (getStepManager().numIncomingConnectionsOfType(StepManager.CON_INFO) == 0) {
result.add(StepManager.CON_INFO);
}
return result;
}
@Override
public List<String> getOutgoingConnectionTypes() {
List<String> result = new ArrayList<String>();
if (getStepManager().numIncomingConnections() > 0) {
int numTraining =
getStepManager().numIncomingConnectionsOfType(
StepManager.CON_TRAININGSET);
int numTesting =
getStepManager().numIncomingConnectionsOfType(StepManager.CON_TESTSET);
int numInstance =
getStepManager().numIncomingConnectionsOfType(StepManager.CON_INSTANCE);
if (numInstance > 0) {
result.add(StepManager.CON_INCREMENTAL_CLASSIFIER);
} else if (numTraining > 0 || numTesting > 0) {
result.add(StepManager.CON_BATCH_CLASSIFIER);
}
result.add(StepManager.CON_TEXT);
if (getClassifier() instanceof Drawable && numTraining > 0) {
result.add(StepManager.CON_GRAPH);
}
}
// info connection - downstream steps can get our wrapped classifier
// for information (configuration) purposes
result.add(StepManager.CON_INFO);
return result;
}
/**
* Load a pre-trained model from the supplied path
*
* @param filePath the path to load the model from
* @throws Exception if a problem occurs
*/
protected void loadModel(String filePath) throws Exception {
ObjectInputStream is = null;
try {
is =
SerializationHelper.getObjectInputStream(new FileInputStream(new File(
filePath)));
m_trainedClassifier = (weka.classifiers.Classifier) is.readObject();
if (!(m_trainedClassifier.getClass().getCanonicalName()
.equals(getClassifier().getClass().getCanonicalName()))) {
throw new Exception("The loaded model '"
+ m_trainedClassifier.getClass().getCanonicalName() + "' is not a '"
+ getClassifier().getClass().getCanonicalName() + "'");
}
// try and read the header
try {
m_trainedClassifierHeader = (Instances) is.readObject();
} catch (Exception ex) {
getStepManager().logWarning(
"Model file '" + filePath
+ "' does not seem to contain an Instances header");
}
} finally {
if (is != null) {
is.close();
}
}
}
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/ClassifierPerformanceEvaluator.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* ClassifierPerformanceEvaluator.java
* Copyright (C) 2002-2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.classifiers.AggregateableEvaluation;
import weka.classifiers.CostMatrix;
import weka.classifiers.Evaluation;
import weka.classifiers.evaluation.ThresholdCurve;
import weka.core.BatchPredictor;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.OptionHandler;
import weka.core.OptionMetadata;
import weka.core.Utils;
import weka.core.WekaException;
import weka.gui.ProgrammaticProperty;
import weka.gui.explorer.ClassifierErrorsPlotInstances;
import weka.gui.explorer.ExplorerDefaults;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.gui.visualize.PlotData2D;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.ExecutionResult;
import weka.knowledgeflow.StepManager;
import weka.knowledgeflow.StepTask;
import weka.knowledgeflow.StepTaskCallback;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Step that implements batch classifier evaluation
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(name = "ClassifierPerformanceEvaluator", category = "Evaluation",
toolTipText = "Evaluates batch classifiers",
iconPath = KFGUIConsts.BASE_ICON_PATH + "ClassifierPerformanceEvaluator.gif")
public class ClassifierPerformanceEvaluator extends BaseStep {
  /** For serialization */
  private static final long serialVersionUID = -2679292079974676672L;
  /**
   * Evaluation object used for evaluating a classifier
   */
  private transient AggregateableEvaluation m_eval;
  /** Plotting instances */
  private transient Instances m_aggregatedPlotInstances = null;
  /** Sizes of points in plotting data */
  private transient ArrayList<Object> m_aggregatedPlotSizes = null;
  /** Plotting shapes */
  private transient ArrayList<Integer> m_aggregatedPlotShapes = null;
  /**
   * True if plot point sizes are to be rendered proportional to the size of the
   * prediction margin
   */
  protected boolean m_errorPlotPointSizeProportionalToMargin;
  /** True to perform cost sensitive evaluation */
  protected boolean m_costSensitiveEval;
  /** The cost matrix (string form) */
  protected String m_costString = "";
  /** The cost matrix */
  protected CostMatrix m_matrix;
  /** Evaluation metrics to output (comma-separated) */
  protected String m_selectedEvalMetrics = "";
  /** Holds a list of metric names */
  protected List<String> m_metricsList = new ArrayList<String>();
  /** True if the step has been reset */
  protected boolean m_isReset;
  /** For counting down the sets left to process */
  protected AtomicInteger m_setsToGo;
  /** The maximum set number in the batch of sets being processed */
  protected int m_maxSetNum;
  /** Number of evaluation tasks currently scheduled for execution */
  protected AtomicInteger m_taskCount;
protected void stringToList(String l) {
if (l != null && l.length() > 0) {
String[] parts = l.split(",");
m_metricsList.clear();
for (String s : parts) {
m_metricsList.add(s.trim());
}
}
}
/**
* Get whether the size of plot data points will be proportional to the
* prediction margin
*
* @return true if plot data points will be rendered proportional to the size
* of the prediction margin
*/
@OptionMetadata(displayName = "Error plot point size proportional to margin",
description = "Set the point size proportional to the prediction "
+ "margin for classification error plots")
public boolean getErrorPlotPointSizeProportionalToMargin() {
return m_errorPlotPointSizeProportionalToMargin;
}
/**
* Set whether the size of plot data points will be proportional to the
* prediction margin
*
* @param e true if plot data points will be rendered proportional to the size
* of the prediction margin
*/
public void setErrorPlotPointSizeProportionalToMargin(boolean e) {
m_errorPlotPointSizeProportionalToMargin = e;
}
/**
* Get the evaluation metrics to output (as a comma-separated list).
*
* @return the evaluation metrics to output
*/
@ProgrammaticProperty
public String getEvaluationMetricsToOutput() {
return m_selectedEvalMetrics;
}
/**
* Set the evaluation metrics to output (as a comma-separated list).
*
* @param m the evaluation metrics to output
*/
public void setEvaluationMetricsToOutput(String m) {
m_selectedEvalMetrics = m;
stringToList(m);
}
/**
* Set whether to evaluate with respoect to costs
*
* @param useCosts true to use cost-sensitive evaluation
*/
@ProgrammaticProperty
public void setEvaluateWithRespectToCosts(boolean useCosts) {
m_costSensitiveEval = useCosts;
}
/**
* Get whether to evaluate with respoect to costs
*
* @return true to use cost-sensitive evaluation
*/
public boolean getEvaluateWithRespectToCosts() {
return m_costSensitiveEval;
}
/**
* Set the cost matrix to use as a string
*
* @param cms the cost matrix to use
*/
@ProgrammaticProperty
public void setCostMatrixString(String cms) {
m_costString = cms;
}
/**
* Get the cost matrix to use as a string
*
* @return the cost matrix
*/
public String getCostMatrixString() {
return m_costString;
}
/**
* Get a list of incoming connection types that this step can accept. Ideally
* (and if appropriate), this should take into account the state of the step
* and any existing incoming connections. E.g. a step might be able to accept
* one (and only one) incoming batch data connection.
*
* @return a list of incoming connections that this step can accept given its
* current state
*/
@Override
public List<String> getIncomingConnectionTypes() {
List<String> result = new ArrayList<String>();
if (getStepManager().numIncomingConnectionsOfType(
StepManager.CON_BATCH_CLASSIFIER) == 0) {
result.add(StepManager.CON_BATCH_CLASSIFIER);
}
return result;
}
/**
* Get a list of outgoing connection types that this step can produce. Ideally
* (and if appropriate), this should take into account the state of the step
* and the incoming connections. E.g. depending on what incoming connection is
* present, a step might be able to produce a trainingSet output, a testSet
* output or neither, but not both.
*
* @return a list of outgoing connections that this step can produce
*/
@Override
public List<String> getOutgoingConnectionTypes() {
List<String> result = new ArrayList<String>();
if (getStepManager().numIncomingConnections() > 0) {
result.add(StepManager.CON_TEXT);
result.add(StepManager.CON_THRESHOLD_DATA);
result.add(StepManager.CON_VISUALIZABLE_ERROR);
}
return result;
}
/**
* Constructor
*/
public ClassifierPerformanceEvaluator() {
super();
m_metricsList = Evaluation.getAllEvaluationMetricNames();
m_metricsList.remove("Coverage");
m_metricsList.remove("Region size");
StringBuilder b = new StringBuilder();
for (String s : m_metricsList) {
b.append(s).append(",");
}
m_selectedEvalMetrics = b.substring(0, b.length() - 1);
}
  /**
   * Initialise the step prior to execution. Resets per-run state and parses
   * the cost matrix string when cost-sensitive evaluation is enabled.
   *
   * @throws WekaException if the cost matrix string cannot be parsed
   */
  @Override
  public void stepInit() throws WekaException {
    m_isReset = true;
    m_PlotInstances = null;
    m_aggregatedPlotInstances = null;
    m_taskCount = new AtomicInteger(0);
    if (m_costSensitiveEval && m_costString != null
      && m_costString.length() > 0) {
      try {
        m_matrix = CostMatrix.parseMatlab(getCostMatrixString());
      } catch (Exception e) {
        throw new WekaException(e);
      }
    }
  }
@Override
public void stop() {
super.stop();
if ((m_taskCount == null || m_taskCount.get() == 0) && isStopRequested()) {
getStepManager().interrupted();
}
}
  /** For generating plottable instances with predictions appended. */
  private transient ClassifierErrorsPlotInstances m_PlotInstances = null;
  /**
   * Process an incoming data payload (if the step accepts incoming
   * connections). On the first set of a batch this sets up the aggregated
   * evaluation and plotting structures; each set is then evaluated
   * asynchronously via an EvaluationTask.
   *
   * @param data the payload to process
   * @throws WekaException if a problem occurs
   */
  @Override
  public synchronized void processIncoming(Data data) throws WekaException {
    try {
      int setNum =
        (Integer) data.getPayloadElement(StepManager.CON_AUX_DATA_SET_NUM);
      Instances trainingData =
        (Instances) data
          .getPayloadElement(StepManager.CON_AUX_DATA_TRAININGSET);
      Instances testData =
        (Instances) data.getPayloadElement(StepManager.CON_AUX_DATA_TESTSET);
      if (testData == null || testData.numInstances() == 0) {
        // can't evaluate empty/non-existent test instances
        getStepManager().logDetailed(
          "No test set available - unable to evaluate");
        return;
      }
      weka.classifiers.Classifier classifier =
        (weka.classifiers.Classifier) data
          .getPayloadElement(StepManager.CON_BATCH_CLASSIFIER);
      String evalLabel =
        data.getPayloadElement(StepManager.CON_AUX_DATA_LABEL).toString();
      if (classifier == null) {
        throw new WekaException("Classifier is null!!");
      }
      if (m_isReset) {
        // first set of the batch - initialise the aggregated evaluation
        m_isReset = false;
        getStepManager().processing();
        m_maxSetNum =
          (Integer) data
            .getPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM);
        m_setsToGo = new AtomicInteger(0);
        if (trainingData == null) {
          // no training data to estimate majority class/mean target from
          Evaluation eval =
            new Evaluation(testData, m_costSensitiveEval ? m_matrix : null);
          m_PlotInstances = ExplorerDefaults.getClassifierErrorsPlotInstances();
          m_PlotInstances.setInstances(testData);
          m_PlotInstances.setClassifier(classifier);
          m_PlotInstances.setClassIndex(testData.classIndex());
          m_PlotInstances.setEvaluation(eval);
          eval =
            adjustForInputMappedClassifier(eval, classifier, testData,
              m_PlotInstances, m_costSensitiveEval ? m_matrix : null);
          eval.useNoPriors();
          m_eval = new AggregateableEvaluation(eval);
          m_eval.setMetricsToDisplay(m_metricsList);
        } else {
          // priors estimated from the training data
          Evaluation eval =
            new Evaluation(trainingData, m_costSensitiveEval ? m_matrix : null);
          m_PlotInstances = ExplorerDefaults.getClassifierErrorsPlotInstances();
          m_PlotInstances.setInstances(trainingData);
          m_PlotInstances.setClassifier(classifier);
          m_PlotInstances.setClassIndex(trainingData.classIndex());
          m_PlotInstances.setEvaluation(eval);
          eval =
            adjustForInputMappedClassifier(eval, classifier, trainingData,
              m_PlotInstances, m_costSensitiveEval ? m_matrix : null);
          m_eval = new AggregateableEvaluation(eval);
          m_eval.setMetricsToDisplay(m_metricsList);
        }
        m_PlotInstances.setUp();
        m_aggregatedPlotInstances = null;
      }
      if (!isStopRequested()) {
        getStepManager().logBasic(
          "Scheduling evaluation of fold/set " + setNum + " for execution");
        // submit the task
        EvaluationTask evalTask =
          new EvaluationTask(this, classifier, trainingData, testData, setNum,
            m_metricsList, getErrorPlotPointSizeProportionalToMargin(),
            evalLabel, new EvaluationCallback(), m_costSensitiveEval ? m_matrix
              : null);
        getStepManager().getExecutionEnvironment().submitTask(evalTask);
        m_taskCount.incrementAndGet();
      } else {
        getStepManager().interrupted();
      }
    } catch (Exception ex) {
      throw new WekaException(ex);
    }
  }
  /**
   * Aggregates a single evaluation task into the overall evaluation. When the
   * final set of the batch has been aggregated, generates and outputs the
   * textual evaluation results, visualizable errors and threshold (ROC) data.
   *
   * @param eval the partial evaluation to aggregate
   * @param classifier the classifier used for evaluation
   * @param testData the test data evaluated on
   * @param plotInstances plotting instances
   * @param setNum the set number processed
   * @param evalLabel evaluation type
   * @throws Exception if a problem occurs
   */
  protected synchronized void aggregateEvalTask(Evaluation eval,
    weka.classifiers.Classifier classifier, Instances testData,
    ClassifierErrorsPlotInstances plotInstances, int setNum, String evalLabel)
    throws Exception {
    m_eval.aggregate(eval);
    if (m_aggregatedPlotInstances == null) {
      // get these first so that the post-processing does not scale the sizes!!
      m_aggregatedPlotShapes =
        (ArrayList<Integer>) plotInstances.getPlotShapes().clone();
      m_aggregatedPlotSizes =
        (ArrayList<Object>) plotInstances.getPlotSizes().clone();
      // this calls the post-processing, so do this last
      m_aggregatedPlotInstances =
        new Instances(plotInstances.getPlotInstances());
    } else {
      // get these first so that post-processing does not scale sizes
      ArrayList<Object> tmpSizes =
        (ArrayList<Object>) plotInstances.getPlotSizes().clone();
      ArrayList<Integer> tmpShapes =
        (ArrayList<Integer>) plotInstances.getPlotShapes().clone();
      Instances temp = plotInstances.getPlotInstances();
      for (int i = 0; i < temp.numInstances(); i++) {
        m_aggregatedPlotInstances.add(temp.get(i));
        m_aggregatedPlotShapes.add(tmpShapes.get(i));
        m_aggregatedPlotSizes.add(tmpSizes.get(i));
      }
    }
    getStepManager().statusMessage(
      "Completed folds/sets " + m_setsToGo.incrementAndGet());
    if (m_setsToGo.get() == m_maxSetNum) {
      // all folds/sets have been aggregated - produce the outputs
      AggregateableClassifierErrorsPlotInstances aggPlot =
        new AggregateableClassifierErrorsPlotInstances();
      aggPlot.setInstances(testData);
      aggPlot.setPlotInstances(m_aggregatedPlotInstances);
      aggPlot.setPlotShapes(m_aggregatedPlotShapes);
      aggPlot.setPlotSizes(m_aggregatedPlotSizes);
      aggPlot
        .setPointSizeProportionalToMargin(m_errorPlotPointSizeProportionalToMargin);
      // triggers scaling of shape sizes
      aggPlot.getPlotInstances();
      String textTitle = "";
      textTitle += classifier.getClass().getName();
      String textOptions = "";
      if (classifier instanceof OptionHandler) {
        textOptions =
          Utils.joinOptions(((OptionHandler) classifier).getOptions());
      }
      // simple class name only for the title
      textTitle =
        textTitle.substring(textTitle.lastIndexOf('.') + 1, textTitle.length());
      if (evalLabel != null && evalLabel.length() > 0) {
        if (!textTitle.toLowerCase().startsWith(evalLabel.toLowerCase())) {
          textTitle = evalLabel + " : " + textTitle;
        }
      }
      CostMatrix cm =
        m_costSensitiveEval ? CostMatrix.parseMatlab(getCostMatrixString())
          : null;
      String resultT =
        "=== Evaluation result ===\n\n"
          + "Scheme: "
          + textTitle
          + "\n"
          + ((textOptions.length() > 0) ? "Options: " + textOptions + "\n" : "")
          + "Relation: " + testData.relationName() + "\n\n"
          + (cm != null ? "Cost matrix:\n" + cm.toString() + "\n" : "")
          + m_eval.toSummaryString();
      if (testData.classAttribute().isNominal()) {
        resultT +=
          "\n" + m_eval.toClassDetailsString() + "\n" + m_eval.toMatrixString();
      }
      Data text = new Data(StepManager.CON_TEXT);
      text.setPayloadElement(StepManager.CON_TEXT, resultT);
      text.setPayloadElement(StepManager.CON_AUX_DATA_TEXT_TITLE, textTitle);
      getStepManager().outputData(text);
      // set up visualizable errors
      if (getStepManager().numOutgoingConnectionsOfType(
        StepManager.CON_VISUALIZABLE_ERROR) > 0) {
        PlotData2D errorD = new PlotData2D(m_aggregatedPlotInstances);
        errorD.setShapeSize(m_aggregatedPlotSizes);
        errorD.setShapeType(m_aggregatedPlotShapes);
        errorD.setPlotName(textTitle + " " + textOptions);
        Data visErr = new Data(StepManager.CON_VISUALIZABLE_ERROR);
        visErr.setPayloadElement(StepManager.CON_VISUALIZABLE_ERROR, errorD);
        getStepManager().outputData(visErr);
      }
      // threshold data
      if (testData.classAttribute().isNominal()
        && getStepManager().numOutgoingConnectionsOfType(
          StepManager.CON_THRESHOLD_DATA) > 0) {
        ThresholdCurve tc = new ThresholdCurve();
        Instances result = tc.getCurve(m_eval.predictions(), 0);
        result.setRelationName(testData.relationName());
        PlotData2D pd = new PlotData2D(result);
        String htmlTitle = "<html><font size=-2>" + textTitle;
        String newOptions = "";
        if (classifier instanceof OptionHandler) {
          String[] options = ((OptionHandler) classifier).getOptions();
          if (options.length > 0) {
            for (int ii = 0; ii < options.length; ii++) {
              if (options[ii].length() == 0) {
                continue;
              }
              // line-break before each option flag (but not negative numbers)
              if (options[ii].charAt(0) == '-'
                && !(options[ii].charAt(1) >= '0' && options[ii].charAt(1) <= '9')) {
                newOptions += "<br>";
              }
              newOptions += options[ii];
            }
          }
        }
        htmlTitle +=
          " " + newOptions + "<br>" + " (class: "
            + testData.classAttribute().value(0) + ")" + "</font></html>";
        pd.setPlotName(textTitle + " (class: "
          + testData.classAttribute().value(0) + ")");
        pd.setPlotNameHTML(htmlTitle);
        // connect all points after the first for the threshold curve
        boolean[] connectPoints = new boolean[result.numInstances()];
        for (int jj = 1; jj < connectPoints.length; jj++) {
          connectPoints[jj] = true;
        }
        pd.setConnectPoints(connectPoints);
        Data threshData = new Data(StepManager.CON_THRESHOLD_DATA);
        threshData.setPayloadElement(StepManager.CON_THRESHOLD_DATA, pd);
        threshData.setPayloadElement(StepManager.CON_AUX_DATA_CLASS_ATTRIBUTE,
          testData.classAttribute());
        getStepManager().outputData(threshData);
      }
      getStepManager().finished();
    }
    if (isStopRequested()) {
      getStepManager().interrupted();
    }
  }
/**
 * Supply the fully qualified class name of a custom editor component
 * (a JComponent) for configuring this step. Returning null would cause the
 * system to generate an editor dynamically via the GenericObjectEditor;
 * this step always supplies its dedicated dialog.
 *
 * @return the fully qualified name of the step editor component
 */
@Override
public String getCustomEditorForStep() {
  final String editorClassName =
    "weka.gui.knowledgeflow.steps.ClassifierPerformanceEvaluatorStepEditorDialog";
  return editorClassName;
}
/**
 * Adjust evaluation configuration if an {@code InputMappedClassifier} is
 * being used. When the mapped model's header differs from the incoming data,
 * a new Evaluation (based on the model's header) is created and the plotting
 * helper is re-configured against a dataset whose instances have been mapped
 * into the model's structure.
 *
 * @param eval the evaluation object to adjust
 * @param classifier the classifier being used
 * @param inst the instances being evaluated on
 * @param plotInstances plotting instances
 * @param matrix the CostMatrix to use, or null for no cost-sensitive
 *          evaluation
 * @return the adjusted {@code Evaluation} object
 * @throws Exception if a problem occurs
 */
protected static Evaluation adjustForInputMappedClassifier(Evaluation eval,
  weka.classifiers.Classifier classifier, Instances inst,
  ClassifierErrorsPlotInstances plotInstances, CostMatrix matrix)
  throws Exception {

  if (!(classifier instanceof weka.classifiers.misc.InputMappedClassifier)) {
    // nothing to adjust for ordinary classifiers
    return eval;
  }

  weka.classifiers.misc.InputMappedClassifier mapper =
    (weka.classifiers.misc.InputMappedClassifier) classifier;
  Instances mappedClassifierHeader = mapper.getModelHeader(new Instances(inst, 0));
  eval = new Evaluation(new Instances(mappedClassifierHeader, 0));

  if (eval.getHeader().equalHeaders(inst)) {
    // headers already agree - no instance mapping required
    return eval;
  }

  // When the InputMappedClassifier is loading a model, we need to make a new
  // dataset that maps the test instances to the structure expected by the
  // mapped classifier - this is only to ensure that the
  // ClassifierPlotInstances object is configured in accordance with what the
  // embedded classifier was trained with
  Instances mappedClassifierDataset =
    mapper.getModelHeader(new Instances(mappedClassifierHeader, 0));
  for (int i = 0; i < inst.numInstances(); i++) {
    mappedClassifierDataset.add(mapper.constructMappedInstance(inst.instance(i)));
  }

  eval.setPriors(mappedClassifierDataset);
  plotInstances.setInstances(mappedClassifierDataset);
  plotInstances.setClassifier(classifier);
  plotInstances.setClassIndex(mappedClassifierDataset.classIndex());
  plotInstances.setEvaluation(eval);

  return eval;
}
/**
 * Subclass of ClassifierErrorsPlotInstances that exposes setters for the
 * aggregated plot shapes/sizes/instances so that point sizes can be scaled
 * according to global min/max values across all folds/sets.
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 */
protected static class AggregateableClassifierErrorsPlotInstances extends
  ClassifierErrorsPlotInstances {

  /** For serialization */
  private static final long serialVersionUID = 2012744784036684168L;

  /**
   * Set the list of plot shapes to use.
   *
   * @param plotShapes the plot shapes to use
   */
  @Override
  public void setPlotShapes(ArrayList<Integer> plotShapes) {
    m_PlotShapes = plotShapes;
  }

  /**
   * Set the list of plot point sizes to use.
   *
   * @param plotSizes the plot sizes to use
   */
  @Override
  public void setPlotSizes(ArrayList<Object> plotSizes) {
    m_PlotSizes = plotSizes;
  }

  /**
   * Set the (aggregated) instances to plot.
   *
   * @param inst the instances to plot
   */
  public void setPlotInstances(Instances inst) {
    m_PlotInstances = inst;
  }

  @Override
  protected void finishUp() {
    m_FinishUpCalled = true;

    // only scale point sizes when visualization data is being retained and
    // sizes actually encode a numeric quantity (numeric class predictions or
    // margin-proportional sizing)
    if (m_SaveForVisualization
      && (m_Instances.classAttribute().isNumeric()
        || m_pointSizeProportionalToMargin)) {
      scaleNumericPredictions();
    }
  }
}
/**
 * Class that performs the actual evaluation of a classifier on a single
 * training/test split (or fold). Designed to be executed asynchronously;
 * results are delivered to an {@code EvaluationCallback}.
 */
protected static class EvaluationTask extends StepTask<Object[]> {

  /** For serialization */
  private static final long serialVersionUID = -686972773536075889L;

  /** The classifier to (optionally) build and then evaluate */
  protected weka.classifiers.Classifier m_classifier;

  /** Cost matrix for cost-sensitive evaluation, or null for standard evaluation */
  protected CostMatrix m_cMatrix;

  /** Training split; null when only scoring a pre-built model on a test set */
  protected Instances m_trainData;

  /** Test split to evaluate on */
  protected Instances m_testData;

  /** The fold/set number of this split */
  protected int m_setNum;

  /** Names of the evaluation metrics to compute/display */
  protected List<String> m_metricsList;

  /** Whether error plot point sizes should be proportional to prediction margin */
  protected boolean m_errPlotPtSizePropToMarg;

  /** Label to attach to this evaluation's output */
  protected String m_evalLabel;

  /** Human-readable scheme description (class name + options) for log messages */
  protected String m_classifierDesc = "";

  /**
   * Constructor.
   *
   * @param source the step that owns this task
   * @param classifier the classifier to evaluate
   * @param trainData the training split (may be null - test-only evaluation)
   * @param testData the test split
   * @param setNum the fold/set number
   * @param metricsList metrics to compute/display
   * @param errPlotPtSizePropToMarg true if error plot point sizes should be
   *          proportional to the prediction margin
   * @param evalLabel label to attach to this evaluation
   * @param callback the callback to notify when the task completes/fails
   * @param matrix cost matrix, or null for no cost-sensitive evaluation
   */
  public EvaluationTask(Step source, weka.classifiers.Classifier classifier,
    Instances trainData, Instances testData, int setNum,
    List<String> metricsList, boolean errPlotPtSizePropToMarg,
    String evalLabel, EvaluationCallback callback, CostMatrix matrix) {
    super(source, callback);
    m_classifier = classifier;
    m_cMatrix = matrix;
    m_trainData = trainData;
    m_testData = testData;
    m_setNum = setNum;
    m_metricsList = metricsList;
    m_errPlotPtSizePropToMarg = errPlotPtSizePropToMarg;
    m_evalLabel = evalLabel;
    // build a short scheme description ("SchemeName -opt1 ...") for
    // status/log messages
    m_classifierDesc = m_classifier.getClass().getCanonicalName();
    m_classifierDesc =
      m_classifierDesc.substring(m_classifierDesc.lastIndexOf(".") + 1);
    if (m_classifier instanceof OptionHandler) {
      String optsString =
        Utils.joinOptions(((OptionHandler) m_classifier).getOptions());
      m_classifierDesc += " " + optsString;
    }
  }

  /**
   * Perform the evaluation. The result array layout is:
   * [0] Evaluation, [1] classifier, [2] test data,
   * [3] plot instances, [4] set number (Integer), [5] evaluation label.
   *
   * @throws Exception if a problem occurs
   */
  @Override
  public void process() throws Exception {
    Object[] r = new Object[6];
    r[4] = m_setNum;
    // register the result array (with the set number) up front so that the
    // failure callback can still report which fold failed if an exception
    // is thrown below
    getExecutionResult().setResult(r);
    getLogHandler().statusMessage(
      "Evaluating " + m_classifierDesc + " on fold/set " + m_setNum);
    getLogHandler().logDetailed(
      "Evaluating " + m_classifierDesc + " on " + m_testData.relationName()
        + " fold/set " + m_setNum);
    ClassifierErrorsPlotInstances plotInstances =
      ExplorerDefaults.getClassifierErrorsPlotInstances();
    Evaluation eval = null;
    if (m_trainData == null) {
      // test-set-only evaluation of a pre-built model: base priors on the
      // test data but tell the Evaluation not to use them
      eval = new Evaluation(m_testData, m_cMatrix);
      plotInstances.setInstances(m_testData);
      plotInstances.setClassifier(m_classifier);
      plotInstances.setClassIndex(m_testData.classIndex());
      plotInstances.setEvaluation(eval);
      plotInstances
        .setPointSizeProportionalToMargin(m_errPlotPtSizePropToMarg);
      eval =
        adjustForInputMappedClassifier(eval, m_classifier, m_testData,
          plotInstances, m_cMatrix);
      eval.useNoPriors();
      eval.setMetricsToDisplay(m_metricsList);
    } else {
      // train/test evaluation: priors come from the training split
      eval = new Evaluation(m_trainData, m_cMatrix);
      plotInstances.setInstances(m_trainData);
      plotInstances.setClassifier(m_classifier);
      plotInstances.setClassIndex(m_trainData.classIndex());
      plotInstances.setEvaluation(eval);
      plotInstances
        .setPointSizeProportionalToMargin(m_errPlotPtSizePropToMarg);
      eval =
        adjustForInputMappedClassifier(eval, m_classifier, m_trainData,
          plotInstances, m_cMatrix);
      eval.setMetricsToDisplay(m_metricsList);
    }

    plotInstances.setUp();
    if (m_classifier instanceof BatchPredictor
      && ((BatchPredictor) m_classifier)
        .implementsMoreEfficientBatchPrediction()) {
      // predict the whole test set in one call when the scheme supports it
      double[][] predictions =
        ((BatchPredictor) m_classifier).distributionsForInstances(m_testData);
      plotInstances.process(m_testData, predictions, eval);
    } else {
      // instance-at-a-time prediction
      for (int i = 0; i < m_testData.numInstances(); i++) {
        Instance temp = m_testData.instance(i);
        plotInstances.process(temp, m_classifier, eval);
      }
    }

    r[0] = eval;
    r[1] = m_classifier;
    r[2] = m_testData;
    r[3] = plotInstances;
    r[5] = m_evalLabel;
  }
}
/**
 * Callback that gets notified when an evaluation task completes. Passes on
 * the partial evaluation results to be aggregated with the overall results.
 */
protected class EvaluationCallback implements StepTaskCallback<Object[]> {

  @Override
  public void taskFinished(ExecutionResult<Object[]> result) throws Exception {
    if (isStopRequested()) {
      getStepManager().interrupted();
    } else {
      // unpack the task's result array (see EvaluationTask.process() for
      // the element layout) and hand it on for aggregation
      Object[] r = result.getResult();
      Evaluation eval = (Evaluation) r[0];
      weka.classifiers.Classifier classifier = (weka.classifiers.Classifier) r[1];
      Instances testData = (Instances) r[2];
      ClassifierErrorsPlotInstances plotInstances =
        (ClassifierErrorsPlotInstances) r[3];
      int setNum = (Integer) r[4];
      String evalLabel = r[5].toString();

      aggregateEvalTask(eval, classifier, testData, plotInstances, setNum,
        evalLabel);
    }
    // one fewer outstanding task either way
    m_taskCount.decrementAndGet();
  }

  @Override
  public void taskFailed(StepTask<Object[]> failedTask,
    ExecutionResult<Object[]> failedResult) throws Exception {
    // the set number is registered in the result array before evaluation
    // starts, so it is available even on failure
    Integer setNum = (Integer) failedResult.getResult()[4];
    getStepManager().logError("Evaluation for fold " + setNum + " failed",
      failedResult.getError());
    m_taskCount.decrementAndGet();
  }
}
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/Clusterer.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* Clusterer.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Drawable;
import weka.core.EnvironmentHandler;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.OptionHandler;
import weka.core.OptionMetadata;
import weka.core.Utils;
import weka.core.WekaException;
import weka.gui.FilePropertyMetadata;
import weka.gui.ProgrammaticProperty;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.gui.knowledgeflow.StepVisual;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.ObjectInputStream;
import java.util.ArrayList;
import java.util.List;
/**
 * Step that wraps a Weka clusterer. Handles trainingSet and testSet incoming
 * connections.
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision: $
 */
@KFStep(name = "Clusterer", category = "Clusterers",
  toolTipText = "Weka clusterer wrapper", iconPath = "",
  resourceIntensive = true)
public class Clusterer extends WekaAlgorithmWrapper implements
  PairedDataHelper.PairedProcessor<weka.clusterers.Clusterer> {

  private static final long serialVersionUID = 3275754421525338036L;

  /**
   * The template for constructing concrete instances of the clusterer to train
   */
  protected weka.clusterers.Clusterer m_clustererTemplate;

  /** Holds a trained/loaded clusterer */
  protected weka.clusterers.Clusterer m_trainedClusterer;

  /** Header used to train the clusterer */
  protected Instances m_trainedClustererHeader;

  /** Handles train test pair routing and synchronization for us */
  protected transient PairedDataHelper<weka.clusterers.Clusterer> m_trainTestHelper;

  /**
   * Optional file to load a pre-trained model to score with (batch), or to
   * score and update (incremental), in the case of testSet only (batch) or
   * instance (incremental) connections
   */
  protected File m_loadModelFileName = new File("");

  /** True if the step has just been reset */
  protected boolean m_isReset;

  /** Re-usable Data object for incrementalClusterer output */
  protected Data m_incrementalData;

  /** True if there is a single incoming "instance" connection */
  protected boolean m_streaming;

  /**
   * Get the clusterer to train
   *
   * @return the clusterer to train
   */
  public weka.clusterers.Clusterer getClusterer() {
    return (weka.clusterers.Clusterer) getWrappedAlgorithm();
  }

  /**
   * Set the clusterer to train
   *
   * @param clusterer the clusterer to train
   */
  @ProgrammaticProperty
  public void setClusterer(weka.clusterers.Clusterer clusterer) {
    setWrappedAlgorithm(clusterer);
  }

  /**
   * Get the name of the clusterer to load at execution time. This only applies
   * in the case where the only incoming connection is a test set connection
   * (batch mode) or an instance connection (incremental prediction mode).
   *
   * @return the name of the file to load the model from
   */
  public File getLoadClustererFileName() {
    return m_loadModelFileName;
  }

  /**
   * Set the name of the clusterer to load at execution time. This only applies
   * in the case where the only incoming connection is a test set connection
   * (batch mode) or an instance connection (incremental prediction mode).
   *
   * @param filename the name of the file to load the model from
   */
  @OptionMetadata(
    displayName = "Clusterer model to load",
    description = "Optional "
      + "path to a clusterer to load at execution time (only applies when using "
      + "testSet connections)")
  @FilePropertyMetadata(fileChooserDialogType = KFGUIConsts.OPEN_DIALOG,
    directoriesOnly = false)
  public void setLoadClustererFileName(File filename) {
    m_loadModelFileName = filename;
  }

  /**
   * Get the class of the wrapped algorithm
   *
   * @return the class of the wrapped algorithm
   */
  @Override
  public Class getWrappedAlgorithmClass() {
    return weka.clusterers.Clusterer.class;
  }

  /**
   * Set the wrapped algorithm
   *
   * @param algo the algorithm to wrap
   */
  @Override
  public void setWrappedAlgorithm(Object algo) {
    super.setWrappedAlgorithm(algo);
    m_defaultIconPath = StepVisual.BASE_ICON_PATH + "DefaultClusterer.gif";
  }

  /**
   * Initialize the step. Makes a copy of the configured clusterer to use as a
   * template, sets up the train/test pair helper (when a trainingSet
   * connection is present) and optionally loads a pre-trained model.
   *
   * @throws WekaException if a problem occurs
   */
  @Override
  public void stepInit() throws WekaException {
    if (!(getWrappedAlgorithm() instanceof weka.clusterers.Clusterer)) {
      throw new WekaException("Incorrect type of algorithm");
    }

    try {
      m_clustererTemplate =
        weka.clusterers.AbstractClusterer
          .makeCopy((weka.clusterers.Clusterer) getWrappedAlgorithm());

      if (m_clustererTemplate instanceof EnvironmentHandler) {
        ((EnvironmentHandler) m_clustererTemplate)
          .setEnvironment(getStepManager().getExecutionEnvironment()
            .getEnvironmentVariables());
      }
    } catch (Exception ex) {
      throw new WekaException(ex);
    }

    // create and initialize our train/test pair helper if necessary
    if (getStepManager().numIncomingConnectionsOfType(
      StepManager.CON_TRAININGSET) > 0) {
      m_trainTestHelper =
        new PairedDataHelper<weka.clusterers.Clusterer>(
          this,
          this,
          StepManager.CON_TRAININGSET,
          getStepManager()
            .numIncomingConnectionsOfType(StepManager.CON_TESTSET) > 0
            ? StepManager.CON_TESTSET : null);
    }

    m_isReset = true;
    m_streaming = false;
    m_incrementalData = new Data(StepManager.CON_INCREMENTAL_CLUSTERER);

    // load a pre-trained model, but only when we are not going to train one
    // ourselves (i.e. no incoming trainingSet connection)
    if (getLoadClustererFileName() != null
      && getLoadClustererFileName().toString().length() > 0
      && getStepManager().numIncomingConnectionsOfType(
        StepManager.CON_TRAININGSET) == 0) {
      String resolvedFileName =
        getStepManager().environmentSubstitute(
          getLoadClustererFileName().toString());
      try {
        loadModel(resolvedFileName);
      } catch (Exception ex) {
        throw new WekaException(ex);
      }
    }
  }

  /**
   * Process an incoming data object
   *
   * @param data the data to process
   * @throws WekaException if a problem occurs
   */
  @Override
  public void processIncoming(Data data) throws WekaException {
    try {
      if (m_isReset) {
        // first data object since reset - establish the incoming structure
        // and (for instance connections) the clusterer to update/score with
        m_isReset = false;
        getStepManager().processing();
        Instances incomingStructure = null;
        if (data.getConnectionName().equals(StepManager.CON_INSTANCE)) {
          incomingStructure =
            ((Instance) data.getPayloadElement(StepManager.CON_INSTANCE))
              .dataset();
        } else {
          incomingStructure =
            (Instances) data.getPayloadElement(data.getConnectionName());
        }

        if (data.getConnectionName().equals(StepManager.CON_INSTANCE)) {
          m_streaming = true;
          if (m_trainedClusterer == null) {
            m_trainedClusterer =
              weka.clusterers.AbstractClusterer.makeCopy(m_clustererTemplate);
            if (m_trainedClusterer instanceof EnvironmentHandler) {
              ((EnvironmentHandler) m_trainedClusterer)
                .setEnvironment(getStepManager().getExecutionEnvironment()
                  .getEnvironmentVariables());
            }
            // TODO - support incremental training at some point?
          }
        } else if (data.getConnectionName().equals(StepManager.CON_TRAININGSET)) {
          m_trainedClustererHeader = incomingStructure;
        }

        // guard against scoring data whose structure differs from the
        // structure the (loaded or to-be-trained) clusterer expects
        if (m_trainedClustererHeader != null
          && !incomingStructure.equalHeaders(m_trainedClustererHeader)) {
          throw new WekaException("Structure of incoming data does not match "
            + "that of the trained clusterer");
        }
      }

      if (m_streaming) {
        // TODO processStreaming()
      } else if (m_trainTestHelper != null) {
        // train test pairs
        m_trainTestHelper.process(data);
      } else {
        processOnlyTestSet(data);
      }
    } catch (Exception ex) {
      throw new WekaException(ex);
    }
  }

  /**
   * Process a Data object in the case where we only have an incoming testSet
   * connection. Requires that a pre-trained model has been loaded via
   * {@link #setLoadClustererFileName(File)}.
   *
   * @param data the Data object to process
   * @throws WekaException if a problem occurs (including when no model has
   *           been loaded to score the test set with)
   */
  protected void processOnlyTestSet(Data data) throws WekaException {
    try {
      // fail with a clear message rather than an opaque NPE when no model
      // was loaded at stepInit() time
      if (m_trainedClusterer == null) {
        throw new WekaException(
          "No trained or loaded clusterer available to evaluate the test set "
            + "with - has a model file been specified?");
      }
      weka.clusterers.Clusterer tempToTest =
        weka.clusterers.AbstractClusterer.makeCopy(m_trainedClusterer);
      Data batchClusterer =
        new Data(StepManager.CON_BATCH_CLUSTERER, tempToTest);
      batchClusterer.setPayloadElement(StepManager.CON_AUX_DATA_TESTSET,
        data.getPayloadElement(StepManager.CON_AUX_DATA_TESTSET));
      batchClusterer.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM,
        data.getPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, 1));
      batchClusterer.setPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM,
        data.getPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM, 1));
      getStepManager().outputData(batchClusterer);
      if (isStopRequested()) {
        getStepManager().interrupted();
      } else {
        getStepManager().finished();
      }
    } catch (WekaException ex) {
      throw ex;
    } catch (Exception ex) {
      throw new WekaException(ex);
    }
  }

  /**
   * Get a list of connection types that could be made to this Step at this
   * point in time
   *
   * @return a list of incoming connection types that could be made at this time
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    List<String> result = new ArrayList<String>();
    int numTraining =
      getStepManager()
        .numIncomingConnectionsOfType(StepManager.CON_TRAININGSET);
    int numTesting =
      getStepManager().numIncomingConnectionsOfType(StepManager.CON_TESTSET);
    int numInstance =
      getStepManager().numIncomingConnectionsOfType(StepManager.CON_INSTANCE);

    if (numTraining == 0) {
      result.add(StepManager.CON_TRAININGSET);
    }
    if (numTesting == 0) {
      result.add(StepManager.CON_TESTSET);
    }

    // streaming prediction only - a single instance connection, and only
    // when no batch connections are present
    if (numTraining == 0 && numTesting == 0 && numInstance == 0) {
      result.add(StepManager.CON_INSTANCE);
    }

    return result;
  }

  /**
   * Get a list of outgoing connections that could be made from this step at
   * this point in time
   *
   * @return a list of outgoing connections that could be made at this point in
   *         time
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    int numTraining =
      getStepManager()
        .numIncomingConnectionsOfType(StepManager.CON_TRAININGSET);
    int numTesting =
      getStepManager().numIncomingConnectionsOfType(StepManager.CON_TESTSET);
    List<String> result = new ArrayList<String>();
    if (numTraining > 0 || numTesting > 0) {
      result.add(StepManager.CON_BATCH_CLUSTERER);
    }
    result.add(StepManager.CON_TEXT);

    if (getClusterer() instanceof Drawable && numTraining > 0) {
      result.add(StepManager.CON_GRAPH);
    }

    // info connection - downstream steps can get our wrapped clusterer
    // for information (configuration) purposes
    result.add(StepManager.CON_INFO);
    return result;
  }

  /**
   * Load a pre-trained model (and, if present, its training header) from the
   * supplied path.
   *
   * @param filePath the path to load the model from
   * @throws Exception if a problem occurs
   */
  protected void loadModel(String filePath) throws Exception {
    // try-with-resources guarantees the stream is closed on all paths
    try (ObjectInputStream is =
      new ObjectInputStream(new BufferedInputStream(new FileInputStream(
        new File(filePath))))) {
      m_trainedClusterer = (weka.clusterers.Clusterer) is.readObject();

      // try and read the header (older model files may not contain one)
      try {
        m_trainedClustererHeader = (Instances) is.readObject();
      } catch (Exception ex) {
        getStepManager().logWarning(
          "Model file '" + filePath
            + "' does not seem to contain an Instances header");
      }
    }
  }

  /**
   * Output a Data object containing a dot graph, if the model is Drawable and
   * we have downstream steps receiving graph connections.
   *
   * @param clusterer the clusterer to generate the graph from
   * @param setNum the set number of the data used to generate the graph
   * @throws WekaException if a problem occurs
   */
  protected void
    outputGraphData(weka.clusterers.Clusterer clusterer, int setNum)
      throws WekaException {
    if (clusterer instanceof Drawable) {
      if (getStepManager().numOutgoingConnectionsOfType(StepManager.CON_GRAPH) == 0) {
        return;
      }
      try {
        String graphString = ((Drawable) clusterer).graph();
        int graphType = ((Drawable) clusterer).graphType();
        String grphTitle = clusterer.getClass().getCanonicalName();
        grphTitle =
          grphTitle.substring(grphTitle.lastIndexOf('.') + 1,
            grphTitle.length());
        grphTitle =
          "Set " + setNum + " (" + m_trainedClustererHeader.relationName()
            + ") " + grphTitle;

        Data graphData = new Data(StepManager.CON_GRAPH);
        graphData.setPayloadElement(StepManager.CON_GRAPH, graphString);
        graphData.setPayloadElement(StepManager.CON_AUX_DATA_GRAPH_TITLE,
          grphTitle);
        graphData.setPayloadElement(StepManager.CON_AUX_DATA_GRAPH_TYPE,
          graphType);
        getStepManager().outputData(graphData);
      } catch (Exception ex) {
        throw new WekaException(ex);
      }
    }
  }

  /**
   * Output a Data object containing a textual description of a model to any
   * outgoing text connections
   *
   * @param clusterer the clusterer to get the textual description of
   * @param setNum the set number of the training data
   * @throws WekaException if a problem occurs
   */
  protected void
    outputTextData(weka.clusterers.Clusterer clusterer, int setNum)
      throws WekaException {
    if (getStepManager().numOutgoingConnectionsOfType(StepManager.CON_TEXT) == 0) {
      return;
    }

    Data textData = new Data(StepManager.CON_TEXT);

    String modelString = clusterer.toString();
    String titleString = clusterer.getClass().getName();

    titleString =
      titleString.substring(titleString.lastIndexOf('.') + 1,
        titleString.length());
    modelString =
      "=== Clusterer model ===\n\n" + "Scheme: " + titleString + "\n"
        + "Relation: " + m_trainedClustererHeader.relationName() + "\n\n"
        + modelString;
    titleString = "Model: " + titleString;

    textData.setPayloadElement(StepManager.CON_TEXT, modelString);
    textData
      .setPayloadElement(StepManager.CON_AUX_DATA_TEXT_TITLE, titleString);

    if (setNum != -1) {
      textData.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, setNum);
    }

    getStepManager().outputData(textData);
  }

  /**
   * Output batch clusterer data to downstream steps
   *
   * @param clusterer the clusterer to output
   * @param setNum the set number of the current dataset
   * @param maxSetNum the maximum set number
   * @param trainingSplit the training data
   * @param testSplit the test data, or null if there is no test data
   * @throws WekaException if a problem occurs
   */
  protected void outputBatchClusterer(weka.clusterers.Clusterer clusterer,
    int setNum, int maxSetNum, Instances trainingSplit, Instances testSplit)
    throws WekaException {
    Data batchClusterer = new Data(StepManager.CON_BATCH_CLUSTERER, clusterer);
    batchClusterer.setPayloadElement(StepManager.CON_AUX_DATA_TRAININGSET,
      trainingSplit);
    if (testSplit != null) {
      batchClusterer.setPayloadElement(StepManager.CON_AUX_DATA_TESTSET,
        testSplit);
    }
    batchClusterer.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, setNum);
    batchClusterer.setPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM,
      maxSetNum);
    batchClusterer.setPayloadElement(StepManager.CON_AUX_DATA_LABEL, getName());
    getStepManager().outputData(batchClusterer);
  }

  /**
   * Process a training split (primary data handled by the PairedDataHelper)
   *
   * @param setNum the number of this split/fold
   * @param maxSetNum the maximum number of splits/folds in the group
   * @param data the actual split/fold data
   * @param helper the PairedDataHelper managing the paired data
   * @return a Clusterer trained on this training split
   * @throws WekaException if a problem occurs
   */
  @Override
  public weka.clusterers.Clusterer processPrimary(Integer setNum,
    Integer maxSetNum, Data data,
    PairedDataHelper<weka.clusterers.Clusterer> helper) throws WekaException {
    Instances trainingData = data.getPrimaryPayload();
    try {
      weka.clusterers.Clusterer clusterer =
        weka.clusterers.AbstractClusterer.makeCopy(m_clustererTemplate);

      String clustererDesc = clusterer.getClass().getCanonicalName();
      clustererDesc =
        clustererDesc.substring(clustererDesc.lastIndexOf('.') + 1);
      if (clusterer instanceof OptionHandler) {
        String optsString =
          Utils.joinOptions(((OptionHandler) clusterer).getOptions());
        clustererDesc += " " + optsString;
      }

      if (clusterer instanceof EnvironmentHandler) {
        ((EnvironmentHandler) clusterer).setEnvironment(getStepManager()
          .getExecutionEnvironment().getEnvironmentVariables());
      }

      // retain the training data
      helper
        .addIndexedValueToNamedStore("trainingSplits", setNum, trainingData);

      if (!isStopRequested()) {
        getStepManager().logBasic(
          "Building " + clustererDesc + " on " + trainingData.relationName()
            + " for fold/set " + setNum + " out of " + maxSetNum);
        if (maxSetNum == 1) {
          // single train/test split - makes sense to retain this trained
          // clusterer
          m_trainedClusterer = clusterer;
        }

        clusterer.buildClusterer(trainingData);
        getStepManager().logDetailed(
          "Finished building " + clustererDesc + " on "
            + trainingData.relationName() + " for fold/set " + setNum
            + " out of " + maxSetNum);

        outputTextData(clusterer, setNum);
        outputGraphData(clusterer, setNum);

        if (getStepManager().numIncomingConnectionsOfType(
          StepManager.CON_TESTSET) == 0) {
          // output a batch clusterer for just the trained model
          outputBatchClusterer(clusterer, setNum, maxSetNum, trainingData, null);
        }
      }

      return clusterer;
    } catch (Exception ex) {
      throw new WekaException(ex);
    }
  }

  /**
   * Process a test split/fold (secondary data handled by PairedDataHelper)
   *
   * @param setNum the set number of this split/fold
   * @param maxSetNum the maximum number of splits/folds in the group
   * @param data the actual split/fold data
   * @param helper the PairedDataHelper managing the paired data
   * @throws WekaException if a problem occurs
   */
  @Override
  public void processSecondary(Integer setNum, Integer maxSetNum, Data data,
    PairedDataHelper<weka.clusterers.Clusterer> helper) throws WekaException {

    // trained clusterer for this set number
    weka.clusterers.Clusterer clusterer =
      helper.getIndexedPrimaryResult(setNum);

    // test data
    Instances testSplit = data.getPrimaryPayload();

    // paired training data
    Instances trainingSplit =
      helper.getIndexedValueFromNamedStore("trainingSplits", setNum);

    getStepManager().logBasic(
      "Dispatching model for set " + setNum + " out of " + maxSetNum
        + " to output");

    outputBatchClusterer(clusterer, setNum, maxSetNum, trainingSplit, testSplit);
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/ClustererPerformanceEvaluator.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* Clusterer.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.clusterers.ClusterEvaluation;
import weka.core.Instances;
import weka.core.OptionHandler;
import weka.core.Utils;
import weka.core.WekaException;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import java.util.ArrayList;
import java.util.List;
/**
* A step that evaluates the performance of batch trained clusterers
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(name = "ClustererPerformanceEvaluator", category = "Evaluation",
toolTipText = "Evaluates batch clusterers",
iconPath = KFGUIConsts.BASE_ICON_PATH + "ClustererPerformanceEvaluator.gif")
public class ClustererPerformanceEvaluator extends BaseStep {
private static final long serialVersionUID = -6337375482954345717L;
/**
 * Get a list of incoming connection types that this step can accept, taking
 * the step's current connections into account. This evaluator accepts a
 * single (and only one) incoming batchClusterer connection.
 *
 * @return a list of incoming connections that this step can accept given its
 *         current state
 */
@Override
public List<String> getIncomingConnectionTypes() {
  List<String> result = new ArrayList<String>();
  boolean alreadyConnected =
    getStepManager().numIncomingConnectionsOfType(
      StepManager.CON_BATCH_CLUSTERER) > 0;
  if (!alreadyConnected) {
    result.add(StepManager.CON_BATCH_CLUSTERER);
  }

  return result;
}
/**
 * Get a list of outgoing connection types that this step can produce, taking
 * the incoming connections into account. A text connection is only offered
 * once at least one incoming connection exists.
 *
 * @return a list of outgoing connections that this step can produce
 */
@Override
public List<String> getOutgoingConnectionTypes() {
  List<String> result = new ArrayList<String>();
  if (getStepManager().numIncomingConnections() > 0) {
    result.add(StepManager.CON_TEXT);
    // NOTE: visualizable error output is not currently supported
    // (StepManager.CON_VISUALIZABLE_ERROR)
  }

  return result;
}
/**
 * Initialize the step. This evaluator holds no state between executions, so
 * there is nothing to initialize.
 */
@Override
public void stepInit() {
  // nothing to do - evaluation state is created per incoming Data object
}
/**
* Process an incoming data payload (if the step accepts incoming connections)
*
* @param data the payload to process
* @throws WekaException if a problem occurs
*/
@Override
public void processIncoming(Data data) throws WekaException {
weka.clusterers.Clusterer clusterer = (weka.clusterers.Clusterer) data
.getPayloadElement(StepManager.CON_BATCH_CLUSTERER);
Instances trainData =
(Instances) data.getPayloadElement(StepManager.CON_AUX_DATA_TRAININGSET);
Instances testData =
(Instances) data.getPayloadElement(StepManager.CON_AUX_DATA_TESTSET);
Integer setNum =
(Integer) data.getPayloadElement(StepManager.CON_AUX_DATA_SET_NUM);
Integer maxSetNum =
(Integer) data.getPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM);
if (setNum == 1) {
getStepManager().processing();
}
ClusterEvaluation eval = new ClusterEvaluation();
eval.setClusterer(clusterer);
// cluster evaluation is no cumulative across sets, so each
// set is a separate evaluation
String clusterSpec = makeClustererSpec(clusterer);
String clusterClass = clusterer.getClass().getCanonicalName();
clusterClass = clusterClass.substring(clusterClass.lastIndexOf('.') + 1,
clusterClass.length());
if (trainData != null && !isStopRequested()) {
getStepManager().statusMessage("Evaluating (training set " + setNum
+ " of " + maxSetNum + ") " + clusterSpec);
try {
eval.evaluateClusterer(trainData);
} catch (Exception ex) {
throw new WekaException(ex);
}
if (!isStopRequested()) {
String resultT = "=== Evaluation result for training instances ===\n\n"
+ "Scheme: " + clusterSpec + "\n" + "Relation: "
+ trainData.relationName() + "\n\n" + eval.clusterResultsToString();
if (trainData.classIndex() >= 0
&& trainData.classAttribute().isNumeric()) {
resultT +=
"\n\nNo class-based evaluation possible. Class attribute has to be "
+ "nominal.";
}
Data text = new Data(StepManager.CON_TEXT, resultT);
text.setPayloadElement(StepManager.CON_AUX_DATA_TEXT_TITLE,
clusterClass + " train (" + setNum + " of " + maxSetNum + ")");
getStepManager().outputData(text);
}
}
if (testData != null && !isStopRequested()) {
getStepManager().statusMessage("Evaluating (test set " + setNum + " of "
+ maxSetNum + ") " + clusterSpec);
eval = new ClusterEvaluation();
eval.setClusterer(clusterer);
try {
eval.evaluateClusterer(testData);
} catch (Exception ex) {
throw new WekaException(ex);
}
if (!isStopRequested()) {
String resultT = "=== Evaluation result for test instances ===\n\n"
+ "Scheme: " + clusterSpec + "\n" + "Relation: "
+ testData.relationName() + "\n\n" + eval.clusterResultsToString();
if (testData.classIndex() >= 0
&& testData.classAttribute().isNumeric()) {
resultT +=
"\n\nNo class-based evaluation possible. Class attribute has to be "
+ "nominal.";
}
Data text = new Data(StepManager.CON_TEXT, resultT);
text.setPayloadElement(StepManager.CON_AUX_DATA_TEXT_TITLE,
clusterClass + " test (" + setNum + " of " + maxSetNum + ")");
getStepManager().outputData(text);
}
}
if (isStopRequested()) {
getStepManager().interrupted();
} else if (setNum.intValue() == maxSetNum.intValue()) {
getStepManager().finished();
}
}
protected String makeClustererSpec(weka.clusterers.Clusterer clusterer) {
String clusterSpec = clusterer.getClass().getCanonicalName();
clusterSpec = clusterSpec.substring(clusterSpec.lastIndexOf('.') + 1,
clusterSpec.length());
String opts = " ";
if (clusterer instanceof OptionHandler) {
opts = Utils.joinOptions(((OptionHandler) clusterer).getOptions());
}
return clusterSpec + opts;
}
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/CostBenefitAnalysis.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* CostBenefitAnalysis
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.gui.visualize.PlotData2D;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
* Step for storing and viewing threshold data in a cost-benefit visualization
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(name = "CostBenefitAnalysis", category = "Visualization",
  toolTipText = "View threshold data in an interactive cost-benefit visualization",
  iconPath = KFGUIConsts.BASE_ICON_PATH + "ModelPerformanceChart.gif")
public class CostBenefitAnalysis extends BaseSimpleDataVisualizer {

  private static final long serialVersionUID = 7756281775575854085L;

  /**
   * Only threshold data connections are accepted by this step.
   *
   * @return a list containing the single accepted connection type
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    return Arrays.asList(StepManager.CON_THRESHOLD_DATA);
  }

  /**
   * Store an incoming threshold data payload for later visualization.
   *
   * @param data the payload to process
   */
  @Override
  public void processIncoming(Data data) {
    getStepManager().processing();
    PlotData2D plot = data.getPrimaryPayload();
    getStepManager().logDetailed("Processing " + plot.getPlotName());
    m_data.add(data);
    getStepManager().finished();
  }

  /**
   * Return the interactive viewers available for this step. The plot viewer
   * is only offered once at least one data payload has been collected.
   *
   * @return a map of viewer names to fully qualified viewer class names
   */
  @Override
  public Map<String, String> getInteractiveViewers() {
    Map<String, String> viewers = new LinkedHashMap<String, String>();
    if (!m_data.isEmpty()) {
      viewers.put("Show plots",
        "weka.gui.knowledgeflow.steps.CostBenefitAnalysisInteractiveView");
    }
    return viewers;
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/CrossValidationFoldMaker.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* CrossValidationFoldMaker.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import weka.core.Instances;
import weka.core.OptionMetadata;
import weka.core.WekaException;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
/**
* Step for generating cross-validation splits
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(name = "CrossValidationFoldMaker", category = "Evaluation",
  toolTipText = "A Step that creates stratified cross-validation folds from incoming data",
  iconPath = KFGUIConsts.BASE_ICON_PATH + "CrossValidationFoldMaker.gif")
public class CrossValidationFoldMaker extends BaseStep {

  private static final long serialVersionUID = 6090713408437825355L;

  /** True to preserve order of the instances rather than randomly shuffling */
  protected boolean m_preserveOrder;

  /** User-specified number of folds */
  protected String m_numFoldsS = "10";

  /** User-specified random seed */
  protected String m_seedS = "1";

  /** Resolved number of folds */
  protected int m_numFolds = 10;

  /** Resolved random seed */
  protected long m_seed = 1L;

  /**
   * Set the number of folds to create
   *
   * @param folds the number of folds to create
   */
  @OptionMetadata(displayName = "Number of folds",
    description = "The number of folds to create", displayOrder = 0)
  public void setNumFolds(final String folds) {
    this.m_numFoldsS = folds;
  }

  /**
   * Get the number of folds to create
   *
   * @return the number of folds to create
   */
  public String getNumFolds() {
    return this.m_numFoldsS;
  }

  /**
   * Set whether to preserve the order of the input instances when creating
   * the folds
   *
   * @param preserve true to preserve the order
   */
  @OptionMetadata(displayName = "Preserve instances order",
    description = "Preserve the order of instances rather than randomly shuffling",
    displayOrder = 1)
  public void setPreserveOrder(final boolean preserve) {
    this.m_preserveOrder = preserve;
  }

  /**
   * Get whether to preserve the order of the input instances when creating
   * the folds
   *
   * @return true to preserve the order
   */
  public boolean getPreserveOrder() {
    return this.m_preserveOrder;
  }

  /**
   * Set the random seed to use
   *
   * @param seed the random seed to use
   */
  @OptionMetadata(displayName = "Random seed",
    description = "The random seed to use for shuffling", displayOrder = 3)
  public void setSeed(final String seed) {
    this.m_seedS = seed;
  }

  /**
   * Get the random seed
   *
   * @return the random seed
   */
  public String getSeed() {
    return this.m_seedS;
  }

  /**
   * Initialize the step: resolve the user-specified seed and fold count
   * (after environment variable substitution) to concrete values. Unparsable
   * values are logged as warnings and the current defaults are retained.
   *
   * @throws WekaException if a problem occurs during initialization
   */
  @Override
  public void stepInit() throws WekaException {
    String seed = this.getStepManager().environmentSubstitute(this.getSeed());
    try {
      this.m_seed = Long.parseLong(seed);
    } catch (NumberFormatException ex) {
      this.getStepManager().logWarning("Unable to parse seed value: " + seed);
    }
    String folds =
      this.getStepManager().environmentSubstitute(this.getNumFolds());
    try {
      this.m_numFolds = Integer.parseInt(folds);
    } catch (NumberFormatException e) {
      this.getStepManager()
        .logWarning("Unable to parse number of folds value: " + folds);
    }
  }

  /**
   * Process an incoming data payload: split it into the configured number of
   * cross-validation folds and output one training/test set pair per fold.
   * Data with a nominal class attribute is stratified first (unless instance
   * order is being preserved).
   *
   * @param data the payload to process
   * @throws WekaException if a problem occurs
   * @throws InterruptedException if the step is interrupted
   */
  @Override
  public void processIncoming(final Data data) throws WekaException,
    InterruptedException {
    this.getStepManager().processing();
    String incomingConnName = data.getConnectionName();
    Instances dataSet = (Instances) data.getPayloadElement(incomingConnName);
    if (dataSet == null) {
      throw new WekaException("Incoming instances should not be null!");
    }
    // copy so that shuffling/stratifying does not perturb the original data
    dataSet = new Instances(dataSet);
    this.getStepManager().logBasic("Creating cross-validation folds");
    this.getStepManager().statusMessage("Creating cross-validation folds");
    Random random = new Random(this.m_seed);
    if (!this.getPreserveOrder()) {
      dataSet.randomize(random);
    }
    if (dataSet.classIndex() >= 0
      && dataSet.attribute(dataSet.classIndex()).isNominal()
      && !this.getPreserveOrder()) {
      this.getStepManager().logBasic("Stratifying data");
      dataSet.stratify(this.m_numFolds);
    }
    for (int i = 0; i < this.m_numFolds; i++) {
      if (this.isStopRequested()) {
        break;
      }
      try {
        // use the getter consistently (the randomize/stratify guards above
        // also go through getPreserveOrder())
        Instances train = (!this.getPreserveOrder())
          ? dataSet.trainCV(this.m_numFolds, i, random)
          : dataSet.trainCV(this.m_numFolds, i);
        Instances test = dataSet.testCV(this.m_numFolds, i);
        Data trainData = new Data(StepManager.CON_TRAININGSET);
        trainData.setPayloadElement(StepManager.CON_TRAININGSET, train);
        trainData.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, i + 1);
        trainData.setPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM,
          this.m_numFolds);
        Data testData = new Data(StepManager.CON_TESTSET);
        testData.setPayloadElement(StepManager.CON_TESTSET, test);
        testData.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, i + 1);
        testData.setPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM,
          this.m_numFolds);
        if (!this.isStopRequested()) {
          this.getStepManager().outputData(trainData, testData);
        }
      } catch (InterruptedException e) {
        throw new WekaException(e);
      }
    }
    this.getStepManager().finished();
  }

  /**
   * Get a list of incoming connection types that this step can accept. Only
   * one incoming connection (dataset, training set or test set) is allowed.
   *
   * @return a list of incoming connections that this step can accept given
   *         its current state
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    if (this.getStepManager().numIncomingConnections() > 0) {
      return new ArrayList<String>();
    }
    return Arrays.asList(StepManager.CON_DATASET, StepManager.CON_TRAININGSET,
      StepManager.CON_TESTSET);
  }

  /**
   * Get a list of outgoing connection types that this step can produce.
   * Training and test set connections are only available once an incoming
   * connection is present.
   *
   * @return a list of outgoing connections that this step can produce
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    return this.getStepManager().numIncomingConnections() > 0 ? Arrays.asList(
      StepManager.CON_TRAININGSET, StepManager.CON_TESTSET)
      : new ArrayList<String>();
  }

  /**
   * If possible, get the output structure for the named connection type as a
   * header-only set of instances. Can return null if the specified connection
   * type is not representable as Instances or cannot be determined at present.
   *
   * @param connectionName the name of the connection type to get the output
   *          structure for
   * @return the output structure as a header-only Instances object
   * @throws WekaException if a problem occurs
   */
  @Override
  public Instances outputStructureForConnectionType(final String connectionName)
    throws WekaException {
    // we produce training and testset connections
    if ((!connectionName.equals(StepManager.CON_TRAININGSET)
      && !connectionName.equals(StepManager.CON_TESTSET))
      || this.getStepManager().numIncomingConnections() == 0) {
      return null;
    }
    // our output structure is the same as whatever kind of input we are
    // getting
    Instances strucForDatasetCon = this.getStepManager()
      .getIncomingStructureForConnectionType(StepManager.CON_DATASET);
    if (strucForDatasetCon != null) {
      return strucForDatasetCon;
    }
    Instances strucForTestsetCon = this.getStepManager()
      .getIncomingStructureForConnectionType(StepManager.CON_TESTSET);
    if (strucForTestsetCon != null) {
      return strucForTestsetCon;
    }
    Instances strucForTrainingCon = this.getStepManager()
      .getIncomingStructureForConnectionType(StepManager.CON_TRAININGSET);
    if (strucForTrainingCon != null) {
      return strucForTrainingCon;
    }
    return null;
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/DataCollector.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* DataCollector.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.WekaException;
/**
 * Auxiliary interface for steps that collect data results of some type - e.g.
 * visualization steps that collect results. The pair of methods allows a
 * step's collected results to be retrieved and later restored (presumably
 * when persisting/reloading a flow's results - confirm against callers).
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision: $
 */
public interface DataCollector {

  /**
   * Get the data that this collector has collected
   *
   * @return the data collected by this collector
   */
  public Object retrieveData();

  /**
   * Set (i.e. restore previously collected) data for this collector
   *
   * @param data the data to set
   * @throws WekaException if there is a problem restoring data
   */
  public void restoreData(Object data) throws WekaException;
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/DataGenerator.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* DataGenerator.java
* Copyright (C) 2016 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.WekaException;
import weka.gui.ProgrammaticProperty;
import weka.gui.beans.StreamThroughput;
import weka.gui.knowledgeflow.StepVisual;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import weka.knowledgeflow.StepManagerImpl;
import java.io.PrintWriter;
import java.io.StringReader;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.List;
/**
* Step that wraps a Weka DataGenerator.
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(name = "DataGenerator", category = "DataGenerators",
  toolTipText = "Weka data generator wrapper", iconPath = "")
public class DataGenerator extends WekaAlgorithmWrapper {

  private static final long serialVersionUID = -7716707145987484527L;

  /** Reusable data object for streaming output (CON_INSTANCE connections) */
  protected Data m_incrementalData;

  /** Overall flow throughput measurement when streaming */
  protected StreamThroughput m_flowThroughput;

  /**
   * Get the class of the wrapped algorithm
   *
   * @return the class of the wrapped algorithm
   */
  @Override
  public Class getWrappedAlgorithmClass() {
    return weka.datagenerators.DataGenerator.class;
  }

  /**
   * Set the algorithm to wrap; also installs the default data-source icon.
   *
   * @param algo the algorithm to wrap
   */
  @Override
  public void setWrappedAlgorithm(Object algo) {
    super.setWrappedAlgorithm(algo);
    m_defaultIconPath = StepVisual.BASE_ICON_PATH + "DefaultDataSource.gif";
  }

  /**
   * Get the data generator
   *
   * @return the data generator
   */
  public weka.datagenerators.DataGenerator getDataGenerator() {
    return (weka.datagenerators.DataGenerator) getWrappedAlgorithm();
  }

  /**
   * Set the data generator to wrap
   *
   * @param dataGenerator the data generator to wrap
   */
  @ProgrammaticProperty
  public void setDataGenerator(weka.datagenerators.DataGenerator dataGenerator) {
    setWrappedAlgorithm(dataGenerator);
  }

  /**
   * Initialize the step. Allocates the reusable {@code Data} object when an
   * outgoing streaming (CON_INSTANCE) connection is present; otherwise
   * clears the streaming state.
   */
  @Override
  public void stepInit() {
    if (getStepManager().numOutgoingConnectionsOfType(StepManager.CON_INSTANCE) > 0) {
      m_incrementalData = new Data(StepManager.CON_INSTANCE);
    } else {
      m_incrementalData = null;
      m_flowThroughput = null;
    }
  }

  /**
   * Start the data generation process. With an outgoing CON_DATASET
   * connection, the wrapped generator's textual output is parsed into one
   * batch of Instances and emitted in a single Data object. Otherwise
   * examples are generated one at a time and streamed over CON_INSTANCE
   * (only legal when the generator supports single-example generation).
   *
   * @throws WekaException if a problem occurs
   */
  @Override
  public void start() throws WekaException {
    if (getStepManager().numOutgoingConnections() > 0) {
      weka.datagenerators.DataGenerator generator = getDataGenerator();
      if (getStepManager()
        .numOutgoingConnectionsOfType(StepManager.CON_DATASET) > 0) {
        getStepManager().processing();
        StringWriter output = new StringWriter();
        try {
          // makeData() writes textual output to the generator's writer;
          // that text is then parsed back into an Instances object
          generator.setOutput(new PrintWriter(output));
          getStepManager().statusMessage("Generating...");
          getStepManager().logBasic("Generating data");
          weka.datagenerators.DataGenerator.makeData(generator,
            generator.getOptions());
          Instances instances =
            new Instances(new StringReader(output.toString()));
          if (!isStopRequested()) {
            Data outputData = new Data(StepManager.CON_DATASET, instances);
            getStepManager().outputData(outputData);
          }
        } catch (Exception ex) {
          throw new WekaException(ex);
        }
        if (isStopRequested()) {
          getStepManager().interrupted();
        } else {
          getStepManager().finished();
        }
      } else {
        // streaming case
        try {
          if (!generator.getSingleModeFlag()) {
            throw new WekaException("Generator does not support "
              + "incremental generation, so cannot be used with "
              + "outgoing 'instance' connections");
          }
        } catch (Exception ex) {
          throw new WekaException(ex);
        }
        // key used to register (and later remove, via "remove" suffix) the
        // overall-flow throughput entry in the status log
        String stm =
          getName() + "$" + hashCode() + 99 + "| overall flow throughput -|";
        m_flowThroughput =
          new StreamThroughput(stm, "Starting flow...",
            ((StepManagerImpl) getStepManager()).getLog());
        try {
          getStepManager().logBasic("Generating...");
          generator.setDatasetFormat(generator.defineDataFormat());
          for (int i = 0; i < generator.getNumExamplesAct(); i++) {
            m_flowThroughput.updateStart();
            getStepManager().throughputUpdateStart();
            if (isStopRequested()) {
              getStepManager().interrupted();
              return;
            }
            // over all examples to be produced
            Instance inst = generator.generateExample();
            m_incrementalData.setPayloadElement(StepManager.CON_INSTANCE, inst);
            getStepManager().throughputUpdateEnd();
            getStepManager().outputData(m_incrementalData);
            m_flowThroughput.updateEnd(((StepManagerImpl) getStepManager())
              .getLog());
          }
          if (isStopRequested()) {
            // clear the throughput status line before bailing out
            ((StepManagerImpl) getStepManager()).getLog().statusMessage(
              stm + "remove");
            getStepManager().interrupted();
            return;
          }
          m_flowThroughput.finished(((StepManagerImpl) getStepManager())
            .getLog());
          // signal end of input
          m_incrementalData.clearPayload();
          getStepManager().throughputFinished(m_incrementalData);
        } catch (Exception ex) {
          throw new WekaException(ex);
        }
      }
    }
  }

  /**
   * If possible, get the output structure for the named connection type as a
   * header-only set of instances. Can return null if the specified connection
   * type is not representable as Instances or cannot be determined at present.
   *
   * @param connectionName the name of the connection type to get the output
   *          structure for
   * @return the output structure as a header-only Instances object
   * @throws WekaException if a problem occurs
   */
  @Override
  public Instances outputStructureForConnectionType(String connectionName)
    throws WekaException {
    if (getStepManager().isStepBusy()) {
      return null;
    }
    weka.datagenerators.DataGenerator generator = getDataGenerator();
    try {
      // NOTE(review): the same structure is returned regardless of
      // connectionName - confirm this is intended for all connection types
      return generator.defineDataFormat();
    } catch (Exception ex) {
      throw new WekaException(ex);
    }
  }

  /**
   * Get acceptable incoming connection types. None in this case since this
   * step is a start point
   *
   * @return null (no acceptable incoming connections)
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    return null;
  }

  /**
   * Get a list of outgoing connection types. Both batch (CON_DATASET) and -
   * when the generator supports single-example generation - streaming
   * (CON_INSTANCE) connections are offered; once one kind is connected, only
   * that kind remains available.
   *
   * @return a list of outgoing connection types
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    List<String> result = new ArrayList<String>();
    if (getStepManager().numOutgoingConnections() == 0) {
      result.add(StepManager.CON_DATASET);
      try {
        if (getDataGenerator().getSingleModeFlag()) {
          result.add(StepManager.CON_INSTANCE);
        }
      } catch (Exception ex) {
        ex.printStackTrace();
      }
    } else if (getStepManager().numOutgoingConnectionsOfType(
      StepManager.CON_DATASET) > 0) {
      result.add(StepManager.CON_DATASET);
    } else {
      result.add(StepManager.CON_INSTANCE);
    }
    return result;
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/DataGrid.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* DataGrid.java
* Copyright (C) 2016 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.SerializedObject;
import weka.core.WekaException;
import weka.gui.ProgrammaticProperty;
import weka.gui.beans.StreamThroughput;
import weka.gui.knowledgeflow.StepVisual;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import weka.knowledgeflow.StepManagerImpl;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
/**
* A step that allows the user to define instances to output
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(name = "DataGrid", category = "DataSources",
  toolTipText = "Specify a grid of data to turn into instances",
  iconPath = StepVisual.BASE_ICON_PATH + "ArffLoader.gif")
public class DataGrid extends BaseStep {

  private static final long serialVersionUID = 1318159328875458847L;

  /** The instances to output (as a textual ARFF string) */
  protected String m_data = "";

  /** Reusable data object for streaming output */
  protected Data m_incrementalData;

  /** For overall stream throughput measuring */
  protected StreamThroughput m_flowThroughput;

  /**
   * Set the data to be output by this {@code DataGrid} in textual ARFF format.
   *
   * @param data the data to be output in textual ARFF format
   */
  @ProgrammaticProperty
  public void setData(String data) {
    m_data = data;
  }

  /**
   * Get the data to be output by this {@code DataGrid} in textual ARFF format
   *
   * @return the data to be output in textual ARFF format
   */
  public String getData() {
    return m_data;
  }

  /**
   * Initialize the step. Allocates the reusable {@code Data} object when an
   * outgoing streaming (CON_INSTANCE) connection is present; otherwise
   * clears the streaming state.
   *
   * @throws WekaException if a problem occurs
   */
  @Override
  public void stepInit() throws WekaException {
    if (getStepManager().numOutgoingConnectionsOfType(StepManager.CON_INSTANCE) > 0) {
      m_incrementalData = new Data(StepManager.CON_INSTANCE);
    } else {
      m_incrementalData = null;
      m_flowThroughput = null;
    }
  }

  /**
   * Start processing. Parses the configured ARFF text (after environment
   * variable substitution) and either emits it as a single batch over a
   * CON_DATASET connection, or streams it one instance at a time over
   * CON_INSTANCE.
   *
   * @throws WekaException if a problem occurs
   */
  @Override
  public void start() throws WekaException {
    if (getStepManager().numOutgoingConnections() > 0) {
      if (m_data.length() == 0) {
        getStepManager().logWarning("No data to output!");
      } else {
        try {
          // make instances
          String data = environmentSubstitute(m_data);
          Instances toOutput = new Instances(new StringReader(data));
          if (getStepManager().numOutgoingConnectionsOfType(
            StepManager.CON_DATASET) > 0) {
            getStepManager().processing();
            Data batch = new Data(StepManager.CON_DATASET, toOutput);
            getStepManager().outputData(batch);
            getStepManager().finished();
          } else {
            // streaming case. The key below registers (and later removes,
            // via the "remove" suffix) the overall-flow throughput entry in
            // the status log
            String stm =
              getName() + "$" + hashCode() + 99
                + "| overall flow throughput -|";
            m_flowThroughput =
              new StreamThroughput(stm, "Starting flow...",
                ((StepManagerImpl) getStepManager()).getLog());
            Instances structure = toOutput.stringFreeStructure();
            Instances structureCopy = null;
            Instances currentStructure = structure;
            boolean containsStrings = toOutput.checkForStringAttributes();
            if (containsStrings) {
              // keep a second header copy so consecutive instances can
              // alternate headers - presumably so the previous outgoing
              // instance's string value is not clobbered; confirm against
              // downstream steps
              structureCopy =
                (Instances) (new SerializedObject(structure).getObject());
            }
            if (isStopRequested()) {
              getStepManager().interrupted();
              return;
            }
            for (int i = 0; i < toOutput.numInstances(); i++) {
              if (isStopRequested()) {
                break;
              }
              Instance nextInst = toOutput.instance(i);
              m_flowThroughput.updateStart();
              getStepManager().throughputUpdateStart();
              if (containsStrings) {
                // swap to the other header copy for this instance
                if (currentStructure == structure) {
                  currentStructure = structureCopy;
                } else {
                  currentStructure = structure;
                }
                for (int j = 0; j < toOutput.numAttributes(); j++) {
                  if (toOutput.attribute(j).isString()) {
                    if (!nextInst.isMissing(j)) {
                      // record the current string value in the header and
                      // point the instance's value at index 0
                      currentStructure.attribute(j).setStringValue(
                        nextInst.stringValue(j));
                      nextInst.setValue(j, 0);
                    }
                  }
                }
              }
              nextInst.setDataset(currentStructure);
              m_incrementalData.setPayloadElement(StepManager.CON_INSTANCE,
                nextInst);
              getStepManager().throughputUpdateEnd();
              getStepManager().outputData(m_incrementalData);
              m_flowThroughput.updateEnd(((StepManagerImpl) getStepManager())
                .getLog());
            }
            if (isStopRequested()) {
              // clear the throughput status line before bailing out
              ((StepManagerImpl) getStepManager()).getLog().statusMessage(
                stm + "remove");
              getStepManager().interrupted();
              return;
            }
            m_flowThroughput.finished(((StepManagerImpl) getStepManager())
              .getLog());
            // signal end of input
            m_incrementalData.clearPayload();
            getStepManager().throughputFinished(m_incrementalData);
          }
        } catch (Exception ex) {
          throw new WekaException(ex);
        }
      }
    } else {
      getStepManager().logWarning("No connected outputs");
    }
  }

  /**
   * If possible, get the output structure for the named connection type as a
   * header-only set of instances. Can return null if the specified connection
   * type is not representable as Instances or cannot be determined at present.
   *
   * @param connectionName the name of the connection type to get the output
   *          structure for
   * @return the output structure as a header-only Instances object
   * @throws WekaException if a problem occurs
   */
  @Override
  public Instances outputStructureForConnectionType(String connectionName)
    throws WekaException {
    if (getStepManager().isStepBusy()) {
      return null;
    }
    if (getStepManager().numOutgoingConnectionsOfType(StepManager.CON_DATASET) == 0
      && getStepManager()
        .numOutgoingConnectionsOfType(StepManager.CON_INSTANCE) == 0) {
      return null;
    }
    try {
      // NOTE(review): parses the raw m_data here, whereas start() applies
      // environmentSubstitute() first - confirm this difference is intended
      return new Instances(new StringReader(m_data)).stringFreeStructure();
    } catch (IOException e) {
      throw new WekaException(e);
    }
  }

  /**
   * Get acceptable incoming connection types - none, since this step is a
   * data source (start point)
   *
   * @return null (no incoming connections accepted)
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    return null;
  }

  /**
   * Get a list of outgoing connection types. Both batch (CON_DATASET) and
   * streaming (CON_INSTANCE) outputs are offered until one kind is
   * connected, after which only that kind remains available.
   *
   * @return a list of outgoing connection types
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    List<String> result = new ArrayList<String>();
    if (getStepManager().numOutgoingConnections() == 0) {
      result.add(StepManager.CON_DATASET);
      result.add(StepManager.CON_INSTANCE);
    } else if (getStepManager().numOutgoingConnectionsOfType(
      StepManager.CON_DATASET) > 0) {
      result.add(StepManager.CON_DATASET);
    } else {
      result.add(StepManager.CON_INSTANCE);
    }
    return result;
  }

  /**
   * Return the fully qualified name of a custom editor component (JComponent)
   * to use for editing the properties of the step. This method can return null,
   * in which case the system will dynamically generate an editor using the
   * GenericObjectEditor
   *
   * @return the fully qualified name of a step editor component
   */
  @Override
  public String getCustomEditorForStep() {
    return "weka.gui.knowledgeflow.steps.DataGridStepEditorDialog";
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/DataVisualizer.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* DataVisualizer.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.OptionMetadata;
import weka.core.PluginManager;
import weka.core.WekaException;
import weka.gui.ProgrammaticProperty;
import weka.gui.beans.OffscreenChartRenderer;
import weka.gui.beans.WekaOffscreenChartRenderer;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.gui.visualize.PlotData2D;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import java.awt.image.BufferedImage;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
* A step that provides a visualization based on
* weka.gui.visualize.VisualizePanel
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(name = "DataVisualizer", category = "Visualization",
  toolTipText = "Visualize training/test sets in a 2D scatter plot.",
  iconPath = KFGUIConsts.BASE_ICON_PATH + "DefaultDataVisualizer.gif")
public class DataVisualizer extends BaseStep implements DataCollector {

  private static final long serialVersionUID = -8013077913672918384L;

  /** Current set of plots. First element is the master plot */
  protected List<PlotData2D> m_plots = new ArrayList<PlotData2D>();

  /** Lazily-initialized renderer used for offscreen (image) chart output */
  protected transient OffscreenChartRenderer m_offscreenRenderer;

  /** Name of the renderer to use for offscreen chart rendering */
  protected String m_offscreenRendererName = "Weka Chart Renderer";

  /**
   * The name of the attribute to use for the x-axis of offscreen plots. If
   * left empty, False Positive Rate is used for threshold curves
   */
  protected String m_xAxis = "";

  /**
   * The name of the attribute to use for the y-axis of offscreen plots. If
   * left empty, True Positive Rate is used for threshold curves
   */
  protected String m_yAxis = "";

  /** Additional options for the offscreen renderer */
  protected String m_additionalOptions = "";

  /** Width of offscreen plots (may contain environment variables) */
  protected String m_width = "500";

  /** Height of offscreen plots (may contain environment variables) */
  protected String m_height = "400";

  /**
   * Set the name of the attribute for the x-axis in offscreen plots. This
   * defaults to "False Positive Rate" for threshold curves if not specified.
   *
   * @param xAxis the name of the xAxis
   */
  @OptionMetadata(displayName = "X-axis attribute",
    description = "Attribute name " + "or /first, /last or /<index>",
    displayOrder = 1)
  public void setOffscreenXAxis(String xAxis) {
    m_xAxis = xAxis;
  }

  /**
   * Get the name of the attribute for the x-axis in offscreen plots
   *
   * @return the name of the xAxis
   */
  public String getOffscreenXAxis() {
    return m_xAxis;
  }

  /**
   * Set the name of the attribute for the y-axis in offscreen plots. This
   * defaults to "True Positive Rate" for threshold curves if not specified.
   *
   * @param yAxis the name of the yAxis
   */
  @OptionMetadata(displayName = "Y-axis attribute",
    description = "Attribute name " + "or /first, /last or /<index>",
    displayOrder = 2)
  public void setOffscreenYAxis(String yAxis) {
    m_yAxis = yAxis;
  }

  /**
   * Get the name of the attribute for the y-axis of offscreen plots.
   *
   * @return the name of the yAxis.
   */
  public String getOffscreenYAxis() {
    return m_yAxis;
  }

  /**
   * Set the width (in pixels) of the offscreen image to generate.
   *
   * @param width the width in pixels.
   */
  @OptionMetadata(displayName = "Chart width (pixels)",
    description = "Width of the rendered chart", displayOrder = 3)
  public void setOffscreenWidth(String width) {
    m_width = width;
  }

  /**
   * Get the width (in pixels) of the offscreen image to generate.
   *
   * @return the width in pixels.
   */
  public String getOffscreenWidth() {
    return m_width;
  }

  /**
   * Set the height (in pixels) of the offscreen image to generate
   *
   * @param height the height in pixels
   */
  @OptionMetadata(displayName = "Chart height (pixels)",
    description = "Height of the rendered chart", displayOrder = 4)
  public void setOffscreenHeight(String height) {
    m_height = height;
  }

  /**
   * Get the height (in pixels) of the offscreen image to generate
   *
   * @return the height in pixels
   */
  public String getOffscreenHeight() {
    return m_height;
  }

  /**
   * Set the name of the renderer to use for offscreen chart rendering
   * operations. Resets any previously configured renderer instance so the
   * new one takes effect lazily.
   *
   * @param rendererName the name of the renderer to use
   */
  @ProgrammaticProperty
  public void setOffscreenRendererName(String rendererName) {
    m_offscreenRendererName = rendererName;
    m_offscreenRenderer = null;
  }

  /**
   * Get the name of the renderer to use for offscreen chart rendering
   * operations
   *
   * @return the name of the renderer to use
   */
  public String getOffscreenRendererName() {
    return m_offscreenRendererName;
  }

  /**
   * Set the additional options for the offscreen renderer
   *
   * @param additional additional options
   */
  @ProgrammaticProperty
  public void setOffscreenAdditionalOpts(String additional) {
    m_additionalOptions = additional;
  }

  /**
   * Get the additional options for the offscreen renderer
   *
   * @return the additional options
   */
  public String getOffscreenAdditionalOpts() {
    return m_additionalOptions;
  }

  /**
   * Configures the offscreen renderer to use. Falls back to the built-in
   * {@code WekaOffscreenChartRenderer} when no renderer name is set or the
   * named plugin cannot be instantiated. Idempotent - does nothing if a
   * renderer is already configured.
   */
  protected void setupOffscreenRenderer() {
    getStepManager().logDetailed(
      "Initializing offscreen renderer: " + getOffscreenRendererName());
    if (m_offscreenRenderer != null) {
      return; // already configured
    }
    if (m_offscreenRendererName == null
      || m_offscreenRendererName.length() == 0
      || m_offscreenRendererName.equalsIgnoreCase("weka chart renderer")) {
      m_offscreenRenderer = new WekaOffscreenChartRenderer();
      return;
    }
    try {
      Object r =
        PluginManager.getPluginInstance(
          "weka.gui.beans.OffscreenChartRenderer", m_offscreenRendererName);
      if (r instanceof OffscreenChartRenderer) {
        m_offscreenRenderer = (OffscreenChartRenderer) r;
        return;
      }
    } catch (Exception ex) {
      // fall through to the default renderer below
    }
    // plugin lookup failed or returned the wrong type - use built-in default
    getStepManager().logWarning(
      "Offscreen renderer '" + getOffscreenRendererName()
        + "' is not available, using default weka chart renderer "
        + "instead");
    m_offscreenRenderer = new WekaOffscreenChartRenderer();
  }

  /**
   * Initialize the step.
   *
   * @throws WekaException if a problem occurs
   */
  @Override
  public void stepInit() throws WekaException {
    // nothing to do
  }

  /**
   * Process an incoming batch of instances: wrap them in a PlotData2D,
   * collect the plot, and (if an image connection is present) render it
   * offscreen and pass the image downstream.
   *
   * @param data the incoming data
   * @throws WekaException if a problem occurs
   */
  @Override
  public synchronized void processIncoming(Data data) throws WekaException {
    getStepManager().processing();
    Instances toPlot = data.getPrimaryPayload();
    // timestamp prefix keeps plot names unique across batches
    String name = (new SimpleDateFormat("HH:mm:ss.SSS - ")).format(new Date());
    String relationName = toPlot.relationName();
    // a "__" prefix on the relation name signals line-plot data whose
    // consecutive points should be connected
    boolean connectIt = relationName.startsWith("__");
    if (connectIt) {
      toPlot = new Instances(toPlot);
      toPlot.setRelationName(relationName.substring(2));
    }
    PlotData2D pd = new PlotData2D(toPlot);
    if (connectIt) {
      boolean[] connect = new boolean[toPlot.numInstances()];
      for (int i = 1; i < toPlot.numInstances(); i++) {
        // only connect a point to its predecessor when neither endpoint
        // carries a negative weight
        if (toPlot.instance(i - 1).weight() >= 0
          && toPlot.instance(i).weight() >= 0) {
          connect[i] = true;
        }
      }
      try {
        pd.setConnectPoints(connect);
      } catch (Exception ex) {
        throw new WekaException(ex);
      }
      relationName = relationName.substring(2);
    }
    String title = name + relationName;
    getStepManager().logDetailed("Processing " + title);
    pd.setPlotName(title);
    m_plots.add(pd);
    if (getStepManager().numOutgoingConnectionsOfType(StepManager.CON_IMAGE) > 0) {
      // createOffscreenPlot() initializes the renderer itself
      BufferedImage osi = createOffscreenPlot(pd);
      Data imageData = new Data(StepManager.CON_IMAGE, osi);
      // truncate long relation names so image titles stay short
      if (relationName.length() > 10) {
        relationName = relationName.substring(0, 10);
      }
      imageData.setPayloadElement(StepManager.CON_AUX_DATA_TEXT_TITLE,
        relationName + ":" + m_xAxis + "," + m_yAxis);
      getStepManager().outputData(imageData);
    }
    getStepManager().finished();
  }

  /**
   * Render a single plot to an offscreen image using the configured
   * renderer. Nominal-class data is split into one series per class value.
   *
   * @param pd the plot data to render
   * @return the rendered image
   * @throws WekaException if rendering fails
   */
  protected BufferedImage createOffscreenPlot(PlotData2D pd)
    throws WekaException {
    setupOffscreenRenderer();
    List<Instances> offscreenPlotInstances = new ArrayList<Instances>();
    Instances predictedI = pd.getPlotInstances();
    if (predictedI.classIndex() >= 0
      && predictedI.classAttribute().isNominal()) {
      // set up multiple series - one for each class
      Instances[] classes = new Instances[predictedI.numClasses()];
      for (int i = 0; i < predictedI.numClasses(); i++) {
        classes[i] = new Instances(predictedI, 0);
        classes[i].setRelationName(predictedI.classAttribute().value(i));
      }
      for (int i = 0; i < predictedI.numInstances(); i++) {
        Instance current = predictedI.instance(i);
        classes[(int) current.classValue()].add((Instance) current.copy());
      }
      offscreenPlotInstances.addAll(Arrays.asList(classes));
    } else {
      offscreenPlotInstances.add(new Instances(predictedI));
    }

    // Assemble renderer options. Treat a null additional-options value
    // (possible via setOffscreenAdditionalOpts(null)) as empty - the
    // original code would NPE on split() below in that case.
    String additional = m_additionalOptions == null ? "" : m_additionalOptions;
    if (additional.length() > 0) {
      additional = environmentSubstitute(additional);
    }
    if (!additional.contains("-color")) {
      // -color is understood by the WekaOffscreenChartRenderer only
      if (additional.length() > 0) {
        additional += ",";
      }
      if (predictedI.classIndex() >= 0) {
        additional += "-color=" + predictedI.classAttribute().name();
      } else {
        additional += "-color=/last";
      }
    }
    List<String> options = new ArrayList<String>();
    for (String p : additional.split(",")) {
      options.add(p.trim());
    }

    String xAxis = environmentSubstitute(m_xAxis);
    String yAxis = environmentSubstitute(m_yAxis);

    // fall back to the documented defaults if the configured dimensions
    // (after environment substitution) are not valid integers
    int defWidth = 500;
    int defHeight = 400;
    try {
      defWidth = Integer.parseInt(environmentSubstitute(m_width));
      defHeight = Integer.parseInt(environmentSubstitute(m_height));
    } catch (NumberFormatException e) {
      getStepManager().logWarning(
        "Unable to parse chart dimensions - using defaults (500x400)");
    }

    getStepManager().logDetailed("Creating image");
    try {
      // "__"-prefixed relation names signal line-chart data
      return predictedI.relationName().startsWith("__")
        ? m_offscreenRenderer.renderXYLineChart(defWidth, defHeight,
          offscreenPlotInstances, xAxis, yAxis, options)
        : m_offscreenRenderer.renderXYScatterPlot(defWidth, defHeight,
          offscreenPlotInstances, xAxis, yAxis, options);
    } catch (Exception e) {
      throw new WekaException(e);
    }
  }

  /**
   * Get the plots collected so far. The first element is the master plot.
   *
   * @return the list of collected plots
   */
  public List<PlotData2D> getPlots() {
    return m_plots;
  }

  /**
   * Discard all collected plot data.
   */
  public void clearPlotData() {
    m_plots.clear();
  }

  /**
   * Retrieve the data stored by this step (the collected plots).
   *
   * @return the collected plots as a List<PlotData2D>
   */
  @Override
  public Object retrieveData() {
    return getPlots();
  }

  /**
   * Restore plot data previously obtained via {@link #retrieveData()}.
   *
   * @param data a List<PlotData2D> to restore
   * @throws WekaException if the argument is not a List
   */
  @SuppressWarnings("unchecked")
  @Override
  public void restoreData(Object data) throws WekaException {
    if (!(data instanceof List)) {
      throw new WekaException("Argument must be a List<PlotData2D>");
    }
    m_plots = (List<PlotData2D>) data;
    // need to generate the outgoing Image data...
    // NOTE(review): the rendered images are discarded here - presumably the
    // call is kept for its renderer-initialization/validation side effects;
    // confirm whether the images should be output downstream instead.
    for (PlotData2D pd : m_plots) {
      createOffscreenPlot(pd);
    }
  }

  /**
   * Get the incoming connection types this step accepts (any batch set).
   *
   * @return the acceptable incoming connection types
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    return Arrays.asList(StepManager.CON_DATASET, StepManager.CON_TRAININGSET,
      StepManager.CON_TESTSET);
  }

  /**
   * Get the outgoing connection types this step can produce. Images can only
   * be produced once there is data to visualize (an incoming connection).
   *
   * @return the outgoing connection types
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    return getStepManager().numIncomingConnections() > 0 ? Arrays
      .asList(StepManager.CON_IMAGE) : new ArrayList<String>();
  }

  /**
   * Return the fully qualified name of the custom editor dialog for this
   * step.
   *
   * @return the fully qualified class name of the step editor component
   */
  @Override
  public String getCustomEditorForStep() {
    return "weka.gui.knowledgeflow.steps.DataVisualizerStepEditorDialog";
  }

  /**
   * Get the interactive viewers offered by this step. The chart viewer is
   * only offered once at least one plot has been collected.
   *
   * @return a map of viewer name to viewer implementation class
   */
  @Override
  public Map<String, String> getInteractiveViewers() {
    Map<String, String> views = new LinkedHashMap<String, String>();
    if (m_plots.size() > 0) {
      views.put("Show charts",
        "weka.gui.knowledgeflow.steps.DataVisualizerInteractiveView");
    }
    return views;
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/Dummy.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* Dummy.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.WekaException;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.StepManager;
import java.util.Arrays;
import java.util.List;
/**
* A "dummy" no-op step
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(name = "Dummy", category = "Misc",
  toolTipText = "A step that is the equivalent of /dev/null",
  iconPath = KFGUIConsts.BASE_ICON_PATH + "DiamondPlain.gif")
public class Dummy extends BaseStep {

  /** For serialization */
  private static final long serialVersionUID = -5822675617001689385L;

  /**
   * Initialize the step. A Dummy step holds no state, so this is a no-op.
   *
   * @throws WekaException if a problem occurs during initialization
   */
  @Override
  public void stepInit() throws WekaException {
    // nothing to do
  }

  /**
   * Get a list of incoming connection types that this step can accept. A
   * Dummy step silently consumes any batch or streaming data connection.
   *
   * @return a list of incoming connections that this step can accept given
   *         its current state
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    return Arrays.asList(StepManager.CON_DATASET, StepManager.CON_TRAININGSET,
      StepManager.CON_TESTSET, StepManager.CON_INSTANCE);
  }

  /**
   * Get a list of outgoing connection types that this step can produce. A
   * Dummy step never produces output.
   *
   * @return an empty list - this step has no outgoing connection types
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    // Return an empty list rather than null so that callers can iterate or
    // query the result without a null check; an empty list conveys the same
    // "no outgoing connections" meaning.
    return Arrays.<String> asList();
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/ExecuteProcess.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* ExecuteProcess.java
* Copyright (C) 2017 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Environment;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Utils;
import weka.core.WekaException;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.JobEnvironment;
import weka.knowledgeflow.LogManager;
import weka.knowledgeflow.StepManager;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.StringWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Knowledge Flow step that can execute static system commands or commands that
* are dynamically defined by the values of attributes in incoming instance or
* environment connections.
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(
name = "ExecuteProcess",
category = "Tools",
toolTipText = "Execute either static or dynamic processes. Dynamic processes "
+ "can have commands, arguments and working directories specified in the "
+ "values of incoming string/nominal attributes in data-based or environment "
+ "connections.", iconPath = KFGUIConsts.BASE_ICON_PATH
+ "ExecuteProcess.gif")
public class ExecuteProcess extends BaseStep {
/** For serialization */
private static final long serialVersionUID = -9153714279540182885L;

/** The process that is used to execute the user's command(s) */
protected Process m_runningProcess;

/** Single static command (may use env vars) */
protected String m_staticExecCmd = "";

/** Arguments (if necessary) for single static command */
protected String m_staticArgs = "";

/** Optional working directory for single static command */
protected String m_staticWorkingDir = "";

/** True to execute commands specified in incoming instance fields */
protected boolean m_useDynamic;

/**
 * Whether to raise an exception when a command fails completely (i.e. doesn't
 * exist or something) vs the case of a non-zero exit status. If not raising
 * an exception, then output indicating failure (with exit status = 1 in the
 * case of instance connections) will be generated
 */
protected boolean m_raiseAnExceptionOnCommandFailure = true;

/*
 * The following fields relate to commands and args in incoming instance
 * connections (or connections that pass through an instance as auxiliary
 * data - CON_ENV, CON_VAR)
 */

/** Name of attribute that will hold dynamic command to be executed */
protected String m_fieldCmd = "";

/** Name of attribute that will hold optional arguments for dynamic command */
protected String m_fieldArgs = "";

/**
 * Name of attribute that will hold optional working directory for dynamic
 * command
 */
protected String m_fieldWorkingDir = "";

/** Resolved attribute index of dynamic command */
protected int m_cmdFieldIndex = -1;

/** Resolved attribute index of dynamic arguments */
protected int m_argsFieldIndex = -1;

/** Resolved attribute index of dynamic working directory */
protected int m_workingDirFieldIndex = -1;

/** Std out captured from the most recently run process */
protected StringBuffer m_stdOutbuffer;

/** Std err captured from the most recently run process */
protected StringBuffer m_stdErrBuffer;

/** Structure of output for outgoing instance connections */
protected Instances m_instanceOutHeader;

/** True if the structure has been checked */
protected boolean m_structureCheckComplete;
/**
 * Get whether to raise an exception when a command fails completely (i.e.
 * doesn't exist or something) vs the case of a non-zero exit status. If not
 * raising an exception, then output indicating failure (with exit status = 1
 * in the case of instance connections) will be generated.
 *
 * @return true if an exception is to be generated on catastrophic command
 *         failure
 */
public boolean getRaiseExceptionOnCommandFailure() {
  return m_raiseAnExceptionOnCommandFailure;
}

/**
 * Set whether to raise an exception when a command fails completely (i.e.
 * doesn't exist or something) vs the case of a non-zero exit status. If not
 * raising an exception, then output indicating failure (with exit status = 1
 * in the case of instance connections) will be generated.
 *
 * @param raiseExceptionOnCommandFailure if an exception is to be generated on
 *          catastrophic command failure
 */
public void setRaiseExceptionOnCommandFailure(
  boolean raiseExceptionOnCommandFailure) {
  m_raiseAnExceptionOnCommandFailure = raiseExceptionOnCommandFailure;
}
/**
 * Get whether to execute dynamic commands. When enabled, the command (and
 * optionally its arguments and working directory) is read from attribute
 * values of incoming instances rather than from the static settings.
 *
 * @return true if dynamic commands are to be executed
 */
public boolean getUseDynamic() {
  return m_useDynamic;
}

/**
 * Set whether to execute dynamic commands (see {@link #getUseDynamic()}).
 *
 * @param useDynamic true if dynamic commands are to be executed
 */
public void setUseDynamic(boolean useDynamic) {
  m_useDynamic = useDynamic;
}
/**
 * Get the static command to be executed. May contain environment variables,
 * which are substituted at execution time.
 *
 * @return the static command to be executed
 */
public String getStaticCmd() {
  return m_staticExecCmd;
}

/**
 * Set the static command to be executed.
 *
 * @param cmd the static command to be executed
 */
public void setStaticCmd(String cmd) {
  m_staticExecCmd = cmd;
}

/**
 * Get the arguments for the static command.
 *
 * @return the arguments for the static command
 */
public String getStaticArgs() {
  return m_staticArgs;
}

/**
 * Set the arguments for the static command. May contain environment
 * variables; multiple arguments are split as by Utils.splitOptions().
 *
 * @param args the arguments for the static command
 */
public void setStaticArgs(String args) {
  m_staticArgs = args;
}

/**
 * Get the working directory for the static command.
 *
 * @return the working directory for the static command
 */
public String getStaticWorkingDir() {
  return m_staticWorkingDir;
}

/**
 * Set the working directory for the static command. If left empty, the
 * process inherits the JVM's working directory.
 *
 * @param workingDir the working directory for the static command
 */
public void setStaticWorkingDir(String workingDir) {
  m_staticWorkingDir = workingDir;
}
/**
 * Get the name of the attribute in the incoming instance structure that
 * contains the command to execute
 *
 * @return the name of the attribute containing the command to execute
 */
public String getDynamicCmdField() {
  return m_fieldCmd;
}

/**
 * Set the name of the attribute in the incoming instance structure that
 * contains the command to execute
 *
 * @param cmdField the name of the attribute containing the command to execute
 */
public void setDynamicCmdField(String cmdField) {
  m_fieldCmd = cmdField;
}

/**
 * Get the name of the attribute in the incoming instance structure that
 * contains the arguments to the command to execute
 *
 * @return the name of the attribute containing the command's arguments
 */
public String getDynamicArgsField() {
  return m_fieldArgs;
}

/**
 * Set the name of the attribute in the incoming instance structure that
 * contains the arguments to the command to execute
 *
 * @param argsField the name of the attribute containing the command's
 *          arguments
 */
public void setDynamicArgsField(String argsField) {
  m_fieldArgs = argsField;
}

/**
 * Get the name of the attribute in the incoming instance structure that
 * contains the working directory for the command to execute
 *
 * @return the name of the attribute containing the command's working
 *         directory
 */
public String getDynamicWorkingDirField() {
  return m_fieldWorkingDir;
}

/**
 * Set the name of the attribute in the incoming instance structure that
 * contains the working directory for the command to execute
 *
 * @param workingDirField the name of the attribute containing the command's
 *          working directory
 */
public void setDynamicWorkingDirField(String workingDirField) {
  m_fieldWorkingDir = workingDirField;
}
/**
 * Initialize the step. Ensures the flow's execution environment is a
 * JobEnvironment (so variables/properties can be propagated), validates that
 * a command has been specified (and, for dynamic execution, that a suitable
 * incoming connection exists), and builds the header used for outgoing
 * instance/environment connections.
 *
 * @throws WekaException if the configuration is invalid
 */
@Override
public void stepInit() throws WekaException {
  m_runningProcess = null;
  m_structureCheckComplete = false;
  // wrap the current environment in a JobEnvironment if necessary, so that
  // variables/properties set by executed commands can be carried downstream
  Environment currentEnv =
    getStepManager().getExecutionEnvironment().getEnvironmentVariables();
  if (currentEnv != null && !(currentEnv instanceof JobEnvironment)) {
    currentEnv = new JobEnvironment(currentEnv);
    getStepManager().getExecutionEnvironment().setEnvironmentVariables(
      currentEnv);
  }
  if (!m_useDynamic && m_staticExecCmd.length() == 0) {
    throw new WekaException("No command to execute specified!");
  }
  if (m_useDynamic) {
    if (m_fieldCmd.length() == 0) {
      throw new WekaException(
        "No incoming attribute specified for obtaining command to execute!");
    }
    if (getStepManager().numIncomingConnections() == 0) {
      throw new WekaException(
        "Dynamic command to execute specified, but there "
          + "are no incoming connections!");
    }
    // dynamic execution needs a connection type that carries instances
    if (getStepManager().numIncomingConnectionsOfType(
      StepManager.CON_INSTANCE) == 0
      && getStepManager().numIncomingConnectionsOfType(
        StepManager.CON_ENVIRONMENT) == 0
      && getStepManager().numIncomingConnectionsOfType(
        StepManager.CON_DATASET) == 0
      && getStepManager().numIncomingConnectionsOfType(
        StepManager.CON_TRAININGSET) == 0
      && getStepManager().numIncomingConnectionsOfType(
        StepManager.CON_TESTSET) == 0) {
      throw new WekaException(
        "Dynamic command execution can only be executed "
          + "on incoming instance, environment, dataset, trainingset or testset"
          + " connections");
    }
  }
  if (getStepManager().numOutgoingConnectionsOfType(StepManager.CON_INSTANCE) > 0
    || getStepManager().numOutgoingConnectionsOfType(
      StepManager.CON_ENVIRONMENT) > 0) {
    // output structure: numeric exit status plus string stdout/stderr
    ArrayList<Attribute> atts = new ArrayList<>();
    atts.add(new Attribute("ExitStatus"));
    atts.add(new Attribute("StdOut", (List<String>) null));
    atts.add(new Attribute("StdErr", (List<String>) null));
    m_instanceOutHeader = new Instances("ProcessResults", atts, 0);
  }
}
/**
 * Start processing if operating as a start point in a flow (i.e. when there
 * are no incoming connections). Executes the configured static command.
 *
 * @throws WekaException if a problem occurs
 */
@Override
public void start() throws WekaException {
  if (getStepManager().numIncomingConnections() == 0) {
    try {
      ProcessBuilder builder = makeStaticProcess();
      getStepManager().processing();
      // no vars/props/results to propagate when run as a start point
      runProcess(builder, null, null, null);
    } catch (Exception e) {
      throw new WekaException(e);
    } finally {
      // always signal completion to the flow, even on failure
      getStepManager().finished();
    }
  }
}
/**
 * Build a ProcessBuilder for the statically configured command. Environment
 * variables are substituted in the command, its arguments and the working
 * directory before the builder is constructed.
 *
 * @return a configured ProcessBuilder instance
 * @throws Exception if variable substitution or argument splitting fails
 */
protected ProcessBuilder makeStaticProcess() throws Exception {
  String command = environmentSubstitute(m_staticExecCmd);
  String[] splitArgs = Utils.splitOptions(environmentSubstitute(m_staticArgs));

  // command first, then each argument as its own element
  List<String> commandLine = new ArrayList<>();
  commandLine.add(command);
  for (String arg : splitArgs) {
    commandLine.add(arg);
  }

  ProcessBuilder pb = new ProcessBuilder(commandLine);
  if (m_staticWorkingDir.length() > 0) {
    pb.directory(new File(environmentSubstitute(m_staticWorkingDir)));
  }
  return pb;
}
/**
 * Construct a ProcessBuilder instance for executing a dynamic command, where
 * the command (and optionally its arguments and working directory) is read
 * from string attribute values of the incoming instance. Environment
 * variables are substituted in all three values.
 *
 * @param incoming the incoming instance containing the command details
 * @return a ProcessBuilder instance, or null if the command attribute value
 *         is missing in the incoming instance
 * @throws Exception if a problem occurs
 */
protected ProcessBuilder makeDynamicProcess(Instance incoming)
  throws Exception {
  if (!incoming.isMissing(m_cmdFieldIndex)) {
    String dynamicCommandVal =
      environmentSubstitute(incoming.stringValue(m_cmdFieldIndex));
    String dynamicOpts = "";
    String dynamicWorkingDir = "";
    // the args and working-dir attributes are optional (index < 0 when the
    // corresponding field name was not configured)
    if (m_argsFieldIndex >= 0) {
      if (!incoming.isMissing(m_argsFieldIndex)) {
        dynamicOpts =
          environmentSubstitute(incoming.stringValue(m_argsFieldIndex));
      }
    }
    if (m_workingDirFieldIndex >= 0) {
      if (!incoming.isMissing(m_workingDirFieldIndex)) {
        dynamicWorkingDir =
          environmentSubstitute(incoming.stringValue(m_workingDirFieldIndex));
      }
    }
    // command first, then each split argument as its own element
    List<String> cmdList = new ArrayList<>();
    cmdList.add(dynamicCommandVal);
    String[] args = Utils.splitOptions(dynamicOpts);
    cmdList.addAll(Arrays.asList(args));
    ProcessBuilder builder =
      new ProcessBuilder(cmdList.toArray(new String[cmdList.size()]));
    if (dynamicWorkingDir.length() > 0) {
      builder.directory(new File(dynamicWorkingDir));
    }
    return builder;
  } else {
    getStepManager().logWarning(
      "Value of command to execute is missing in " + "incoming instance");
    return null;
  }
}
/**
 * Execute a configured process. Passes the flow's environment variables on
 * to the child process, captures stdout/stderr into the member buffers, and
 * routes success or failure output downstream according to the exit status.
 * If launching fails with an IOException and raiseExceptionOnCommandFailure
 * is disabled, the failure is logged and reported downstream with exit
 * status 1 instead of being rethrown.
 *
 * @param builder the ProcessBuilder to execute
 * @param varsToSet environment variables to pass on to the ProcessBuilder
 * @param propsToSet properties to pass on downstream
 * @param results results to pass on downstream
 * @throws IOException if launching the process fails (only when
 *           raiseExceptionOnCommandFailure is enabled)
 * @throws InterruptedException if interrupted while waiting for the process
 * @throws WekaException if a problem occurs
 */
protected void runProcess(ProcessBuilder builder,
  Map<String, String> varsToSet, Map<String, Map<String, String>> propsToSet,
  Map<String, LinkedHashSet<Data>> results) throws IOException,
  InterruptedException, WekaException {
  Map<String, String> env = builder.environment();
  // add environment vars so the child process sees the flow's variables
  Environment flowEnv =
    getStepManager().getExecutionEnvironment().getEnvironmentVariables();
  if (flowEnv != null) {
    Set<String> vars = flowEnv.getVariableNames();
    for (String var : vars) {
      env.put(var, flowEnv.getVariableValue(var));
    }
  }
  StringWriter stdOutWriter = new StringWriter();
  StringWriter stdErrWriter = new StringWriter();
  try {
    // start with fresh, empty buffers so that the catch block below never
    // reports stale output from a previous run
    m_stdOutbuffer = new StringBuffer();
    m_stdErrBuffer = new StringBuffer();
    m_runningProcess = builder.start();
    // copy(...) is a stream-draining helper defined elsewhere in this class.
    // NOTE(review): stdout is drained completely before stderr is touched; a
    // process that fills the stderr pipe buffer while stdout is still open
    // could block here - confirm copy() handles this, or consider
    // builder.redirectErrorStream(true).
    copy(m_runningProcess.getInputStream(), stdOutWriter);
    copy(m_runningProcess.getErrorStream(), stdErrWriter);
    int status = m_runningProcess.waitFor();
    m_stdOutbuffer = stdOutWriter.getBuffer();
    m_stdErrBuffer = stdErrWriter.getBuffer();
    if (status == 0) {
      handleOutputSuccess(varsToSet, propsToSet, results,
        Utils.joinOptions(builder.command().toArray(new String[0])));
    } else {
      handleOutputFailure(status, varsToSet, propsToSet, results,
        Utils.joinOptions(builder.command().toArray(new String[0])));
    }
  } catch (IOException ex) {
    if (m_raiseAnExceptionOnCommandFailure) {
      throw ex;
    }
    // best-effort mode: log the failure and emit failure output (with exit
    // status 1) instead of propagating the exception
    getStepManager().logWarning(
      "Command: "
        + Utils.joinOptions(builder.command().toArray(new String[0]))
        + " failed with exception:\n" + LogManager.stackTraceToString(ex));
    handleOutputFailure(1, varsToSet, propsToSet, results,
      Utils.joinOptions(builder.command().toArray(new String[0])));
  }
}
/**
 * Output data relating to successful execution of a command. Output is only
 * generated for the outgoing connection types that are actually present
 * (job success, instance and/or text).
 *
 * @param varsToSet environment variables to pass on downstream
 * @param propsToSet properties to pass on downstream
 * @param results results to pass on downstream
 * @param command the actual command that was executed
 * @throws WekaException if a problem occurs
 */
protected void handleOutputSuccess(Map<String, String> varsToSet,
  Map<String, Map<String, String>> propsToSet,
  Map<String, LinkedHashSet<Data>> results, String command)
  throws WekaException {
  if (getStepManager().numOutgoingConnectionsOfType(
    StepManager.CON_JOB_SUCCESS) > 0) {
    // payload is the captured stdout, or a default message if there was none
    Data success =
      new Data(StepManager.CON_JOB_SUCCESS,
        m_stdOutbuffer.length() > 0 ? m_stdOutbuffer.toString()
          : "Process completed successfully: " + command);
    success.setPayloadElement(StepManager.CON_AUX_DATA_IS_INCREMENTAL, true);
    addAuxToData(success, varsToSet, propsToSet, results);
    getStepManager().outputData(success);
  }
  if (getStepManager().numOutgoingConnectionsOfType(StepManager.CON_INSTANCE) > 0) {
    // conn instance presumes that additional info (such as props/env settings
    // in instance data and results are not needed in the downstream steps).
    // Otherwise, conn job success/failure or conn environment should be used
    // as output
    m_instanceOutHeader.attribute(1).setStringValue(
      m_stdOutbuffer.length() > 0 ? m_stdOutbuffer.toString()
        : "Process completed successfully");
    m_instanceOutHeader.attribute(2).setStringValue(
      m_stdErrBuffer.length() > 0 ? m_stdErrBuffer.toString() : "");
    // vals[1]/vals[2] stay 0.0, which indexes the string values just set on
    // attributes 1 and 2 above
    double[] vals = new double[3];
    vals[0] = 0; // success
    Instance outputInst = new DenseInstance(1.0, vals);
    outputInst.setDataset(m_instanceOutHeader);
    Data instOut = new Data(StepManager.CON_INSTANCE, outputInst);
    getStepManager().outputData(instOut);
  }
  if (getStepManager().numOutgoingConnectionsOfType(StepManager.CON_TEXT) > 0) {
    Data textOut = new Data(StepManager.CON_TEXT, m_stdOutbuffer.toString());
    textOut.setPayloadElement(StepManager.CON_AUX_DATA_TEXT_TITLE,
      "Process completed successfully: " + command);
    getStepManager().outputData(textOut);
  }
}
/**
 * Output data relating to an unsuccessful execution of a command. Output is
 * only generated for the outgoing connection types that are actually present
 * (job failure, instance and/or text).
 *
 * @param returnCode the return code generated by the process
 * @param varsToSet environment variables to pass on downstream
 * @param propsToSet properties to pass on downstream
 * @param results results to pass on downstream
 * @param command the command that was executed
 * @throws WekaException if a problem occurs
 */
protected void handleOutputFailure(int returnCode,
  Map<String, String> varsToSet, Map<String, Map<String, String>> propsToSet,
  Map<String, LinkedHashSet<Data>> results, String command)
  throws WekaException {
  if (getStepManager().numOutgoingConnectionsOfType(
    StepManager.CON_JOB_FAILURE) > 0) {
    // payload is the captured stderr, or a default message if there was none
    Data failure =
      new Data(StepManager.CON_JOB_FAILURE,
        m_stdErrBuffer.length() > 0 ? m_stdErrBuffer.toString()
          : "Process did not complete successfully - return code "
            + returnCode);
    failure.setPayloadElement(StepManager.CON_AUX_DATA_IS_INCREMENTAL, true);
    addAuxToData(failure, varsToSet, propsToSet, results);
    getStepManager().outputData(failure);
  }
  if (getStepManager().numOutgoingConnectionsOfType(StepManager.CON_INSTANCE) > 0) {
    m_instanceOutHeader.attribute(1).setStringValue(
      m_stdOutbuffer.length() > 0 ? m_stdOutbuffer.toString() : "");
    m_instanceOutHeader.attribute(2).setStringValue(
      m_stdErrBuffer.length() > 0 ? m_stdErrBuffer.toString()
        : "Process did " + "not complete successfully");
    // vals[1]/vals[2] stay 0.0, which indexes the string values just set on
    // attributes 1 and 2 above
    double[] vals = new double[3];
    vals[0] = returnCode; // failure code
    Instance outputInst = new DenseInstance(1.0, vals);
    outputInst.setDataset(m_instanceOutHeader);
    Data instOut = new Data(StepManager.CON_INSTANCE, outputInst);
    getStepManager().outputData(instOut);
  }
  if (getStepManager().numOutgoingConnectionsOfType(StepManager.CON_TEXT) > 0) {
    Data textOut = new Data(StepManager.CON_TEXT, m_stdErrBuffer.toString());
    textOut.setPayloadElement(StepManager.CON_AUX_DATA_TEXT_TITLE,
      "Process did not complete successfully: " + command);
    getStepManager().outputData(textOut);
  }
}
/**
* Adds auxilliary information to a Data object
*
* @param data the Data object to add to
* @param varsToSet environment variables to add
* @param propsToSet properties to add
* @param results results to add
*/
protected void addAuxToData(Data data, Map<String, String> varsToSet,
Map<String, Map<String, String>> propsToSet,
Map<String, LinkedHashSet<Data>> results) {
if (varsToSet != null) {
data.setPayloadElement(StepManager.CON_AUX_DATA_ENVIRONMENT_VARIABLES,
varsToSet);
}
if (propsToSet != null) {
data.setPayloadElement(StepManager.CON_AUX_DATA_ENVIRONMENT_PROPERTIES,
propsToSet);
}
if (results != null) {
data.setPayloadElement(StepManager.CON_AUX_DATA_ENVIRONMENT_RESULTS,
results);
}
}
  /**
   * Process an incoming Data object. On first call this resolves the
   * command/args/working-dir attribute indexes from the incoming structure.
   * Incremental (streaming) data triggers one process execution per payload;
   * batch data triggers one execution per row of the incoming dataset.
   *
   * @param data the payload to process
   * @throws WekaException if a problem occurs
   */
  @Override
  public void processIncoming(Data data) throws WekaException {
    // one-time structure check: locate the attributes named by the user
    if (!m_structureCheckComplete) {
      m_structureCheckComplete = true;
      Instances structure = null;
      if (data.getConnectionName().equals(StepManager.CON_INSTANCE)) {
        structure = ((Instance) data.getPrimaryPayload()).dataset();
      } else if (data.getConnectionName().equals(StepManager.CON_ENVIRONMENT)) {
        // environment connections carry their instance as auxiliary payload
        structure =
          ((Instance) data.getPayloadElement(StepManager.CON_AUX_DATA_INSTANCE))
            .dataset();
      } else {
        structure = data.getPrimaryPayload();
      }
      checkStructure(structure);
    }
    if (isStopRequested()) {
      getStepManager().interrupted();
      return;
    }
    if (data.isIncremental()) {
      if (getStepManager().isStreamFinished(data)) {
        // forward the end-of-stream marker, carrying over any env
        // vars/props/results present on the final data object
        Data finished = new Data(data.getConnectionName());
        if (data.getConnectionName().equals(StepManager.CON_ENVIRONMENT)
          || data.getConnectionName().equals(StepManager.CON_JOB_SUCCESS)
          || data.getConnectionName().equals(StepManager.CON_JOB_FAILURE)) {
          finished
            .setPayloadElement(
              StepManager.CON_AUX_DATA_ENVIRONMENT_VARIABLES,
              data
                .getPayloadElement(StepManager.CON_AUX_DATA_ENVIRONMENT_VARIABLES));
          finished
            .setPayloadElement(
              StepManager.CON_AUX_DATA_ENVIRONMENT_PROPERTIES,
              data
                .getPayloadElement(StepManager.CON_AUX_DATA_ENVIRONMENT_PROPERTIES));
        }
        if (data.getConnectionName().equals(StepManager.CON_JOB_SUCCESS)
          || data.getConnectionName().equals(StepManager.CON_JOB_FAILURE)) {
          finished.setPayloadElement(
            StepManager.CON_AUX_DATA_ENVIRONMENT_RESULTS, data
              .getPayloadElement(StepManager.CON_AUX_DATA_ENVIRONMENT_RESULTS));
        }
        getStepManager().throughputFinished(finished);
        return;
      }
      Map<String, String> envVars =
        data.getPayloadElement(StepManager.CON_AUX_DATA_ENVIRONMENT_VARIABLES);
      Map<String, Map<String, String>> propsToSet =
        data.getPayloadElement(StepManager.CON_AUX_DATA_ENVIRONMENT_PROPERTIES);
      Map<String, LinkedHashSet<Data>> results =
        data.getPayloadElement(StepManager.CON_AUX_DATA_ENVIRONMENT_RESULTS);
      if (!m_useDynamic) {
        // just do the static thing
        getStepManager().throughputUpdateStart();
        try {
          ProcessBuilder builder = makeStaticProcess();
          runProcess(builder, envVars, propsToSet, results);
        } catch (Exception ex) {
          throw new WekaException(ex);
        }
        getStepManager().throughputUpdateEnd();
      } else {
        // dynamic command: only instance/environment connections carry an
        // instance from which command/args/working dir can be read
        if (data.getConnectionName().equals(StepManager.CON_INSTANCE)
          || data.getConnectionName().equals(StepManager.CON_ENVIRONMENT)) {
          Instance toProcess =
            (Instance) (data.getConnectionName().equals(
              StepManager.CON_INSTANCE) ? data.getPrimaryPayload() : data
              .getPayloadElement(StepManager.CON_AUX_DATA_INSTANCE));
          getStepManager().throughputUpdateStart();
          try {
            ProcessBuilder builder = makeDynamicProcess(toProcess);
            runProcess(builder, envVars, propsToSet, results);
          } catch (Exception ex) {
            throw new WekaException(ex);
          }
          getStepManager().throughputUpdateEnd();
        }
      }
    } else {
      getStepManager().processing();
      // handle dataset/trainingset/testset conns: one process execution per
      // row of the incoming batch
      Instances toProcess = data.getPrimaryPayload();
      for (Instance inst : toProcess) {
        try {
          if (isStopRequested()) {
            getStepManager().interrupted();
            return;
          }
          ProcessBuilder builder = makeDynamicProcess(inst);
          runProcess(builder, null, null, null);
        } catch (Exception ex) {
          throw new WekaException(ex);
        }
      }
      if (getStepManager().numOutgoingConnectionsOfType(
        StepManager.CON_INSTANCE) > 0) {
        // signal end of output stream
        Data finished = new Data(StepManager.CON_INSTANCE);
        getStepManager().throughputFinished(finished);
      }
      getStepManager().finished();
    }
  }
/**
* Check the incoming instance structure with respect to the attribute names
* specified by the user for the command, args and working directory.
*
* @param structure the incoming instance structure
* @throws WekaException if a problem occurs
*/
protected void checkStructure(Instances structure) throws WekaException {
Attribute cmdAtt = structure.attribute(m_fieldCmd);
if (cmdAtt == null) {
throw new WekaException("Unable to find attribute (" + m_fieldCmd
+ ") holding command to execute in the incoming instance structure");
}
m_cmdFieldIndex = cmdAtt.index();
if (m_fieldArgs != null && m_fieldArgs.length() > 0) {
Attribute argsAtt = structure.attribute(m_fieldArgs);
if (argsAtt == null) {
throw new WekaException("Unable to find attribute (" + m_fieldArgs
+ ") holding command args in the incoming instance structure");
}
m_argsFieldIndex = argsAtt.index();
}
if (m_fieldWorkingDir != null && m_fieldWorkingDir.length() > 0) {
Attribute workingAtt = structure.attribute(m_fieldWorkingDir);
if (workingAtt == null) {
throw new WekaException("Unable to find attribute ("
+ m_fieldWorkingDir
+ ") holding the working directory in the incoming instance stream");
}
m_workingDirFieldIndex = workingAtt.index();
}
}
/**
* Get the acceptable incoming connection types at this point in time
*
* @return a list of acceptable incoming connection types
*/
@Override
public List<String> getIncomingConnectionTypes() {
// incoming success/failure can only be used to execute static commands
// incoming instance and environment can be used to execute dynamic commands
if (getStepManager().numIncomingConnections() == 0) {
return Arrays.asList(StepManager.CON_INSTANCE, StepManager.CON_DATASET,
StepManager.CON_TRAININGSET, StepManager.CON_TESTSET,
StepManager.CON_ENVIRONMENT, StepManager.CON_JOB_SUCCESS,
StepManager.CON_JOB_FAILURE);
}
return null;
}
/**
* Get a list of possible outgoing connection types at this point in time
*
* @return a list of possible outgoing connection types
*/
@Override
public List<String> getOutgoingConnectionTypes() {
// outgoing instance connections contain att vals with process exit status
// and std out/err
// job success/failure only indicate success/failure and pass on env
// vars/props/results (results are not used/added to by this step)
return Arrays.asList(StepManager.CON_INSTANCE, StepManager.CON_JOB_SUCCESS,
StepManager.CON_JOB_FAILURE, StepManager.CON_TEXT);
}
/**
* Get, if possible, the outgoing instance structure for the supplied incoming
* connection type
*
* @param connectionName the name of the connection type to get the output
* structure for
* @return an Instances object or null if outgoing structure is not applicable
* or cannot be determined
* @throws WekaException if a problem occurs
*/
@Override
public Instances outputStructureForConnectionType(String connectionName)
throws WekaException {
if (getStepManager().numIncomingConnections() == 0
|| (!connectionName.equals(StepManager.CON_INSTANCE) && !connectionName
.equals(StepManager.CON_ENVIRONMENT))) {
return null;
}
// our output structure is the same as whatever kind of input we are getting
return getStepManager().getIncomingStructureForConnectionType(
connectionName);
}
/**
* Get the name of the editor dialog for this step
*
* @return the name of the editor dialog for this step
*/
@Override
public String getCustomEditorForStep() {
return "weka.gui.knowledgeflow.steps.ExecuteProcessStepEditorDialog";
}
/**
* Copy an input stream to a writer
*
* @param input the input stream to copy from
* @param out the writer to write to
* @throws IOException if a problem occurs
*/
protected static void copy(InputStream input, Writer out) throws IOException {
InputStreamReader in = new InputStreamReader(input);
int n = 0;
char[] buffer = new char[1024 * 4];
while ((n = in.read(buffer)) != -1) {
out.write(buffer, 0, n);
}
}
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/Filter.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* Filter.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.EnvironmentHandler;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.WekaException;
import weka.filters.StreamableFilter;
import weka.gui.ProgrammaticProperty;
import weka.gui.knowledgeflow.StepVisual;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Step that wraps a Weka filter. Handles dataSet, trainingSet, testSet and
 * instance connections. In batch mode, the first batch seen for a given set
 * number configures (via setInputFormat()) a fresh copy of the wrapped
 * filter; a test set carrying the same set number is then filtered by that
 * same copy. In streaming mode a single filter instance processes instances
 * one at a time.
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision: $
 */
@KFStep(name = "Filter", category = "Filters",
  toolTipText = "Weka filter wrapper", iconPath = "")
public class Filter extends WekaAlgorithmWrapper {

  private static final long serialVersionUID = 6857031910153224479L;

  /** Template filter (copied for each batch/stream so that runs stay independent) */
  protected weka.filters.Filter m_filterTemplate;

  /** Used when processing streaming data */
  protected weka.filters.Filter m_streamingFilter;

  /** True if we've been reset */
  protected boolean m_isReset;

  /** True if we're streaming */
  protected boolean m_streaming;

  /** True if string attributes are present in streaming case */
  protected boolean m_stringAttsPresent;

  /**
   * Map of filters that have processed the first batch, keyed by set number
   * (-1 is used when the incoming data carries no set number)
   */
  protected Map<Integer, weka.filters.Filter> m_filterMap =
    new HashMap<Integer, weka.filters.Filter>();

  /**
   * Map of waiting test sets when batch filtering (a test set may arrive
   * before the training set with the same set number)
   */
  protected Map<Integer, Instances> m_waitingTestData =
    new HashMap<Integer, Instances>();

  /** Data object to reuse when processing incrementally */
  protected Data m_incrementalData;

  /** Keeps track of the number of train/test batches processed */
  protected AtomicInteger m_setCount;

  /**
   * Get the class of the wrapped algorithm
   *
   * @return the class of the wrapped algorithm
   */
  @Override
  public Class getWrappedAlgorithmClass() {
    return weka.filters.Filter.class;
  }

  /**
   * Set the wrapped algorithm (filter)
   *
   * @param algo the algorithm to wrap
   */
  @Override
  public void setWrappedAlgorithm(Object algo) {
    super.setWrappedAlgorithm(algo);
    m_defaultIconPath = StepVisual.BASE_ICON_PATH + "DefaultFilter.gif";
    // derive the icon path from the filter's class name with "weka." removed
    // (note: replace() strips every occurrence of "weka.", not just a prefix)
    String iconp = algo.getClass().getCanonicalName().replace("weka.","") + ".gif";
    m_iconPath = StepVisual.BASE_ICON_PATH + iconp;
  }

  /**
   * Set the filter. Calls {@code setWrappedAlgorithm()}
   *
   * @param filter the filter to use
   */
  @ProgrammaticProperty
  public void setFilter(weka.filters.Filter filter) {
    setWrappedAlgorithm(filter);
  }

  /**
   * Get the filter. Convenience method that calls {@code getWrappedAlgorithm()}
   *
   * @return the filter
   */
  public weka.filters.Filter getFilter() {
    return (weka.filters.Filter) getWrappedAlgorithm();
  }

  /**
   * Get a list of incoming connection types that this step can accept. Ideally
   * (and if appropriate), this should take into account the state of the step
   * and any existing incoming connections. E.g. a step might be able to accept
   * one (and only one) incoming batch data connection.
   *
   * @return a list of incoming connections that this step can accept given its
   *         current state
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    List<String> result = new ArrayList<String>();
    int numDataset =
      getStepManager().numIncomingConnectionsOfType(StepManager.CON_DATASET);
    int numTraining =
      getStepManager()
        .numIncomingConnectionsOfType(StepManager.CON_TRAININGSET);
    int numTesting =
      getStepManager().numIncomingConnectionsOfType(StepManager.CON_TESTSET);
    int numInstance =
      getStepManager().numIncomingConnectionsOfType(StepManager.CON_INSTANCE);
    // streaming input is only possible with no batch inputs and a filter
    // that implements StreamableFilter
    if (numDataset == 0 && numTraining == 0 && numTesting == 0
      && getFilter() instanceof StreamableFilter) {
      result.add(StepManager.CON_INSTANCE);
    }
    if (numInstance == 0 && numDataset == 0 && numTraining == 0) {
      result.add(StepManager.CON_DATASET);
      result.add(StepManager.CON_TRAININGSET);
    }
    if (numInstance == 0 && numTesting == 0) {
      result.add(StepManager.CON_TESTSET);
    }
    return result;
  }

  /**
   * Get a list of outgoing connection types that this step can produce. Ideally
   * (and if appropriate), this should take into account the state of the step
   * and the incoming connections. E.g. depending on what incoming connection is
   * present, a step might be able to produce a trainingSet output, a testSet
   * output or neither, but not both.
   *
   * @return a list of outgoing connections that this step can produce
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    List<String> result = new ArrayList<String>();
    int numDataset =
      getStepManager().numIncomingConnectionsOfType(StepManager.CON_DATASET);
    int numTraining =
      getStepManager()
        .numIncomingConnectionsOfType(StepManager.CON_TRAININGSET);
    int numTesting =
      getStepManager().numIncomingConnectionsOfType(StepManager.CON_TESTSET);
    int numInstance =
      getStepManager().numIncomingConnectionsOfType(StepManager.CON_INSTANCE);
    // outgoing types mirror whatever incoming connection types are present
    if (numInstance > 0) {
      result.add(StepManager.CON_INSTANCE);
    }
    if (numDataset > 0) {
      result.add(StepManager.CON_DATASET);
    }
    if (numTraining > 0) {
      result.add(StepManager.CON_TRAININGSET);
    }
    if (numTesting > 0) {
      result.add(StepManager.CON_TESTSET);
    }
    // info connection - downstream steps can get our wrapped filter
    // for information (configuration) purposes
    result.add(StepManager.CON_INFO);
    return result;
  }

  /**
   * Initialize the step. Makes a copy of the configured filter to use as the
   * template for all per-batch/per-stream copies and clears all per-run state.
   *
   * @throws WekaException if a problem occurs during initialization
   */
  @Override
  public void stepInit() throws WekaException {
    if (!(getWrappedAlgorithm() instanceof weka.filters.Filter)) {
      throw new WekaException("Incorrect type of algorithm");
    }
    try {
      m_filterTemplate = weka.filters.Filter.makeCopy(getFilter());
      if (m_filterTemplate instanceof EnvironmentHandler) {
        ((EnvironmentHandler) m_filterTemplate).setEnvironment(getStepManager()
          .getExecutionEnvironment().getEnvironmentVariables());
      }
    } catch (Exception ex) {
      throw new WekaException(ex);
    }
    m_incrementalData = new Data(StepManager.CON_INSTANCE);
    m_filterMap.clear();
    m_waitingTestData.clear();
    m_streaming = false;
    m_stringAttsPresent = false;
    m_isReset = true;
  }

  /**
   * Process an incoming data payload (if the step accepts incoming connections)
   *
   * @param data the payload to process
   * @throws WekaException if a problem occurs
   */
  @Override
  public void processIncoming(Data data) throws WekaException {
    Integer setNum = data.getPayloadElement(StepManager.CON_AUX_DATA_SET_NUM);
    Integer maxSetNum =
      data.getPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM);
    if (m_isReset) {
      // first payload of a run: initialize the expected batch count and, for
      // instance connections, set up the streaming filter from the structure
      m_isReset = false;
      m_setCount = new AtomicInteger(maxSetNum != null ? maxSetNum : 1);
      getStepManager().processing();
      if (data.getConnectionName().equals(StepManager.CON_INSTANCE)) {
        Instances incomingStructure =
          ((Instance) data.getPayloadElement(StepManager.CON_INSTANCE))
            .dataset();
        m_streaming = true;
        getStepManager().logBasic("Initializing streaming filter");
        try {
          m_streamingFilter = weka.filters.Filter.makeCopy(m_filterTemplate);
          m_streamingFilter.setInputFormat(incomingStructure);
          m_stringAttsPresent =
            m_streamingFilter.getOutputFormat().checkForStringAttributes();
        } catch (Exception ex) {
          throw new WekaException(ex);
        }
      }
    }
    // we are NOT necessarily guaranteed to get tran/test pairs in the order
    // of train followed by test (especially if they come from different
    // sources). Output from trainTestSplitMaker and XValMaker are guaranteed
    // to be in order though
    if (m_streaming) {
      if (getStepManager().isStreamFinished(data)) {
        checkPendingStreaming();
        m_incrementalData.clearPayload();
        getStepManager().throughputFinished(m_incrementalData);
      } else {
        processStreaming(data);
      }
    } else if (data.getConnectionName().equals(StepManager.CON_TRAININGSET)
      || data.getConnectionName().equals(StepManager.CON_DATASET)) {
      Instances d = data.getPrimaryPayload();
      processFirstBatch(d, data.getConnectionName(), setNum, maxSetNum);
    } else {
      // if there are just test set connections, then process them as first
      // batches. Otherwise, process them as subsequent batches
      Instances d = data.getPrimaryPayload();
      if (getStepManager().numIncomingConnectionsOfType(
        StepManager.CON_TRAININGSET) == 0
        && getStepManager().numIncomingConnectionsOfType(
          StepManager.CON_DATASET) == 0) {
        processFirstBatch(d, data.getConnectionName(), setNum, maxSetNum);
      } else {
        processSubsequentBatch(d, data.getConnectionName(), setNum, maxSetNum);
      }
    }
    if (isStopRequested()) {
      getStepManager().interrupted();
    } else if (!m_streaming) {
      // all expected batches processed - declare the step finished
      if (m_setCount.get() == 0) {
        getStepManager().finished();
        // save memory
        m_waitingTestData.clear();
        m_filterMap.clear();
      }
    }
  }

  /**
   * Processes the first batch of instances via the filter. The trained filter
   * copy is remembered (keyed by set number) so that a matching test set can
   * be filtered with it later.
   *
   * @param batch the batch of instances process
   * @param conType the connection type
   * @param setNum the set number of this batch
   * @param maxSetNum the maximum set number
   * @throws WekaException if a problem occurs
   */
  protected void processFirstBatch(Instances batch, String conType,
    Integer setNum, Integer maxSetNum) throws WekaException {
    try {
      weka.filters.Filter filterToUse =
        weka.filters.Filter.makeCopy(m_filterTemplate);
      if (!isStopRequested()) {
        filterToUse.setInputFormat(batch);
        String message = "Filtering " + conType + " (" + batch.relationName();
        if (setNum != null && maxSetNum != null) {
          message += ", set " + setNum + " of " + maxSetNum;
        }
        message += ")";
        getStepManager().statusMessage(message);
        getStepManager().logBasic(message);
        processBatch(batch, conType, filterToUse, setNum, maxSetNum);
        if (setNum != null) {
          m_filterMap.put(setNum, filterToUse);
        } else {
          // -1 is the key for "no set number supplied"
          m_filterMap.put(-1, filterToUse);
        }
        // if the matching test set arrived before us, filter it now
        Instances waitingTest = m_waitingTestData.get(setNum);
        if (waitingTest != null) {
          processSubsequentBatch(waitingTest, StepManager.CON_TESTSET, setNum,
            maxSetNum);
        } else if (getStepManager().numIncomingConnections() == 1) {
          // no test set will follow, so this set is fully processed
          m_setCount.decrementAndGet();
        }
      }
    } catch (Exception ex) {
      throw new WekaException(ex);
    }
  }

  /**
   * Processes batches of instances that occur after the first batch. If the
   * filter for the corresponding set number has not been trained yet (i.e.
   * the test set arrived first), the batch is parked until the training set
   * arrives.
   *
   * @param batch the batch of instances to process
   * @param conType the connection type
   * @param setNum the set number of this batch
   * @param maxSetNum the maximum set number
   * @throws WekaException if a problem occurs
   */
  protected synchronized void processSubsequentBatch(Instances batch,
    String conType, Integer setNum, Integer maxSetNum) throws WekaException {
    Integer sN = setNum != null ? setNum : -1;
    weka.filters.Filter filterToUse = m_filterMap.get(sN);
    if (filterToUse == null) {
      // we've received the test set first...
      m_waitingTestData.put(setNum, batch);
      return;
    }
    if (!isStopRequested()) {
      String message = "Filtering " + conType + " (" + batch.relationName();
      if (setNum != null && maxSetNum != null) {
        message += ", set " + setNum + " of " + maxSetNum;
      }
      message += ") - batch mode";
      getStepManager().statusMessage(message);
      getStepManager().logBasic(message);
      processBatch(batch, conType, filterToUse, setNum, maxSetNum);
    }
    m_setCount.decrementAndGet();
  }

  /**
   * Process a batch of instances with a supplied filter and output the
   * filtered result on the same connection type.
   *
   * @param batch the batch to process
   * @param conType the connection type
   * @param filterToUse the filter to apply
   * @param setNum the set number
   * @param maxSetNum the maximum set number
   * @throws WekaException if a problem occurs
   */
  protected void processBatch(Instances batch, String conType,
    weka.filters.Filter filterToUse, Integer setNum, Integer maxSetNum)
    throws WekaException {
    try {
      Instances filtered = weka.filters.Filter.useFilter(batch, filterToUse);
      String title = conType + ": " + filtered.relationName();
      Data output = new Data(conType, filtered);
      // set numbers (and the title) only accompany the output when the
      // incoming batch carried them
      if (setNum != null && maxSetNum != null) {
        output.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, setNum);
        output.setPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM,
          maxSetNum);
        output.setPayloadElement(StepManager.CON_AUX_DATA_TEXT_TITLE, title);
      }
      getStepManager().outputData(output);
    } catch (Exception ex) {
      throw new WekaException(ex);
    }
  }

  /**
   * Process an instance in streaming mode. The filter may or may not make an
   * output instance available immediately.
   *
   * @param data the {@code Data} object containing the instance
   * @throws WekaException if a problem occurs
   */
  protected void processStreaming(Data data) throws WekaException {
    Instance toFilter = data.getPrimaryPayload();
    getStepManager().throughputUpdateStart();
    try {
      if (m_streamingFilter.input(toFilter)) {
        Instance filteredI = m_streamingFilter.output();
        if (m_stringAttsPresent) {
          // string values are held in the header; re-register each value and
          // point the instance at slot 0 so downstream steps can read it
          for (int i = 0; i < filteredI.numAttributes(); i++) {
            if (filteredI.dataset().attribute(i).isString()
              && !filteredI.isMissing(i)) {
              String val = filteredI.stringValue(i);
              filteredI.dataset().attribute(i).setStringValue(val);
              filteredI.setValue(i, 0);
            }
          }
        }
        m_incrementalData
          .setPayloadElement(StepManager.CON_INSTANCE, filteredI);
        if (!isStopRequested()) {
          getStepManager().outputData(m_incrementalData);
        }
      }
    } catch (Exception ex) {
      throw new WekaException(ex);
    }
    getStepManager().throughputUpdateEnd();
  }

  /**
   * Check to see if there are any pending instances to output from the filter
   * (invoked when the incoming stream has finished).
   *
   * @throws WekaException if a problem occurs
   */
  protected void checkPendingStreaming() throws WekaException {
    try {
      m_streamingFilter.batchFinished();
      Instances structureCopy =
        m_streamingFilter.getOutputFormat().stringFreeStructure();
      while (m_streamingFilter.numPendingOutput() > 0) {
        getStepManager().throughputUpdateStart();
        Instance filteredI = m_streamingFilter.output();
        if (m_stringAttsPresent) {
          // as in processStreaming(), but values go into a string-free copy
          // of the output structure
          for (int i = 0; i < filteredI.numAttributes(); i++) {
            if (filteredI.attribute(i).isString() && ! filteredI.isMissing(i)) {
              String val = filteredI.stringValue(i);
              structureCopy.attribute(i).setStringValue(val);
              filteredI.setValue(i, 0);
            }
          }
          filteredI.setDataset(structureCopy);
        }
        m_incrementalData
          .setPayloadElement(StepManager.CON_INSTANCE, filteredI);
        if (!isStopRequested()) {
          getStepManager().outputData(m_incrementalData);
        }
        getStepManager().throughputUpdateEnd();
      }
    } catch (Exception ex) {
      throw new WekaException(ex);
    }
  }

  /**
   * If possible, get the output structure for the named connection type as a
   * header-only set of instances. Can return null if the specified connection
   * type is not representable as Instances or cannot be determined at present.
   *
   * @param connectionName the name of the connection type to get the output
   *          structure for
   * @return the output structure as a header-only Instances object
   * @throws WekaException if a problem occurs
   */
  @Override
  public Instances outputStructureForConnectionType(String connectionName)
    throws WekaException {
    Instances incomingStructure = null;
    String incomingConType = null;
    if (getStepManager().numIncomingConnectionsOfType(
      StepManager.CON_TRAININGSET) > 0) {
      incomingConType = StepManager.CON_TRAININGSET;
    } else if (getStepManager().numIncomingConnectionsOfType(
      StepManager.CON_TESTSET) > 0) {
      incomingConType = StepManager.CON_TESTSET;
    } else if (getStepManager().numIncomingConnectionsOfType(
      StepManager.CON_DATASET) > 0) {
      incomingConType = StepManager.CON_DATASET;
    } else if (getStepManager().numIncomingConnectionsOfType(
      StepManager.CON_INSTANCE) > 0) {
      incomingConType = StepManager.CON_INSTANCE;
    }
    if (incomingConType != null) {
      incomingStructure =
        getStepManager().getIncomingStructureForConnectionType(incomingConType);
    }
    if (incomingStructure != null) {
      try {
        // determine the output format by configuring a throwaway copy of the
        // filter with the incoming structure
        weka.filters.Filter tempFilter =
          weka.filters.Filter.makeCopy(getFilter());
        if (tempFilter.setInputFormat(incomingStructure)) {
          return tempFilter.getOutputFormat();
        }
      } catch (Exception ex) {
        throw new WekaException(ex);
      }
    }
    return null;
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/FlowByExpression.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* FlowByExpression.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Attribute;
import weka.core.Environment;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Utils;
import weka.core.WekaException;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import javax.swing.tree.DefaultMutableTreeNode;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Pattern;
/**
* A step that splits incoming instances (or instance streams) according to the
* evaluation of a logical expression. The expression can test the values of one
* or more incoming attributes. The test can involve constants or comparing one
* attribute's values to another. Inequalities along with string operations such
* as contains, starts-with, ends-with and regular expressions may be used as
* operators.
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(
name = "FlowByExpression",
category = "Flow",
toolTipText = "Route instances according to the evaluation of a logical expression. "
+ "The expression can test the values of "
+ "one or more incoming attributes. The test can involve constants or comparing "
+ "one attribute's values to another. Inequalities along with string operations "
+ "such as contains, starts-with, ends-with and regular expressions may be used "
+ "as operators. \"True\" instances can be sent to one downstream step and "
+ "\"False\" instances sent to another.",
iconPath = KFGUIConsts.BASE_ICON_PATH + "FlowByExpression.png")
public class FlowByExpression extends BaseStep {
  private static final long serialVersionUID = 7006511778677802572L;

  /** True if this step has been reset (set in stepInit()) */
  protected boolean m_isReset;

  /** The root of the expression tree */
  protected ExpressionNode m_root;

  /** The expression tree to use in internal textual format */
  protected String m_expressionString = "";

  /**
   * Name of the step to receive instances that evaluate to true via the
   * expression
   */
  protected String m_customNameOfTrueStep = "";

  /**
   * Name of the step to receive instances that evaluate to false via the
   * expression
   */
  protected String m_customNameOfFalseStep = "";

  /** Incoming structure */
  protected Instances m_incomingStructure;

  /** Keep track of how many incoming batches we've seen */
  protected AtomicInteger m_batchCount;

  /** True if the "true" step is valid (i.e. exists in the flow) */
  protected boolean m_validTrueStep;

  /** True if the "false" step is valid (i.e. exists in the flow) */
  protected boolean m_validFalseStep;

  /** Re-usable data object for streaming */
  protected Data m_streamingData;
  /**
   * Set the expression to evaluate, expressed in the internal textual format
   * used to serialize the expression tree
   *
   * @param expressionString the expression to use (in internal format)
   */
  public void setExpressionString(String expressionString) {
    m_expressionString = expressionString;
  }
  /**
   * Get the current expression, expressed in the internal textual format used
   * to serialize the expression tree
   *
   * @return the current expression (in internal format)
   */
  public String getExpressionString() {
    return m_expressionString;
  }
  /**
   * Set the name of the connected downstream step to send "true" instances to
   * (i.e. instances for which the expression evaluates to true)
   *
   * @param trueStep the name of the step to send "true" instances to
   */
  public void setTrueStepName(String trueStep) {
    m_customNameOfTrueStep = trueStep;
  }
  /**
   * Get the name of the connected downstream step to send "true" instances to
   * (i.e. instances for which the expression evaluates to true)
   *
   * @return the name of the step to send "true" instances to
   */
  public String getTrueStepName() {
    return m_customNameOfTrueStep;
  }
  /**
   * Set the name of the connected downstream step to send "false" instances
   * to (i.e. instances for which the expression evaluates to false)
   *
   * @param falseStep the name of the step to send "false" instances to
   */
  public void setFalseStepName(String falseStep) {
    m_customNameOfFalseStep = falseStep;
  }
  /**
   * Get the name of the connected downstream step to send "false" instances
   * to (i.e. instances for which the expression evaluates to false)
   *
   * @return the name of the step to send "false" instances to
   */
  public String getFalseStepName() {
    return m_customNameOfFalseStep;
  }
/**
* Get a list of the names of connected downstream steps
*
* @return a list of the names of connected downstream steps
*/
public List<String> getDownstreamStepNames() {
List<String> result = new ArrayList<String>();
for (List<StepManager> o : getStepManager().getOutgoingConnections()
.values()) {
for (StepManager m : o) {
result.add(m.getName());
}
}
return result;
}
/**
* Initialize the step.
*
* @throws WekaException if a problem occurs during initialization
*/
@Override
public void stepInit() throws WekaException {
m_isReset = true;
m_streamingData = null;
// see if the specified downstream steps are connected
m_validTrueStep =
getStepManager().getOutgoingConnectedStepWithName(
environmentSubstitute(m_customNameOfTrueStep)) != null;
m_validFalseStep =
getStepManager().getOutgoingConnectedStepWithName(
environmentSubstitute(m_customNameOfFalseStep)) != null;
m_incomingStructure = null;
if (m_expressionString == null || m_expressionString.length() == 0) {
throw new WekaException("No expression defined!");
}
}
/**
* Get a list of incoming connection types that this step can accept. Ideally
* (and if appropriate), this should take into account the state of the step
* and any existing incoming connections. E.g. a step might be able to accept
* one (and only one) incoming batch data connection.
*
* @return a list of incoming connections that this step can accept given its
* current state
*/
@Override
public List<String> getIncomingConnectionTypes() {
if (getStepManager().numIncomingConnections() == 0) {
return Arrays.asList(StepManager.CON_DATASET,
StepManager.CON_TRAININGSET, StepManager.CON_TESTSET,
StepManager.CON_INSTANCE);
}
if (getStepManager().numIncomingConnectionsOfType(StepManager.CON_INSTANCE) == 0) {
return Arrays.asList(StepManager.CON_DATASET,
StepManager.CON_TRAININGSET, StepManager.CON_TESTSET);
}
return null;
}
/**
* Get a list of outgoing connection types that this step can produce. Ideally
* (and if appropriate), this should take into account the state of the step
* and the incoming connections. E.g. depending on what incoming connection is
* present, a step might be able to produce a trainingSet output, a testSet
* output or neither, but not both.
*
* @return a list of outgoing connections that this step can produce
*/
@Override
public List<String> getOutgoingConnectionTypes() {
List<String> result = new ArrayList<String>();
if (getStepManager().numIncomingConnectionsOfType(StepManager.CON_INSTANCE) > 0) {
result.add(StepManager.CON_INSTANCE);
} else if (getStepManager().numIncomingConnections() > 0) {
if (getStepManager()
.numIncomingConnectionsOfType(StepManager.CON_DATASET) > 0) {
result.add(StepManager.CON_DATASET);
}
if (getStepManager().numIncomingConnectionsOfType(
StepManager.CON_TRAININGSET) > 0) {
result.add(StepManager.CON_TRAININGSET);
}
if (getStepManager()
.numIncomingConnectionsOfType(StepManager.CON_TESTSET) > 0) {
result.add(StepManager.CON_TESTSET);
}
}
return result;
}
/**
* If possible, get the output structure for the named connection type as a
* header-only set of instances. Can return null if the specified connection
* type is not representable as Instances or cannot be determined at present.
*
* @param connectionName the name of the connection type to get the output
* structure for
* @return the output structure as a header-only Instances object
* @throws WekaException if a problem occurs
*/
@Override
public Instances outputStructureForConnectionType(String connectionName)
throws WekaException {
if (getStepManager().numIncomingConnections() > 0) {
for (Map.Entry<String, List<StepManager>> e : getStepManager()
.getIncomingConnections().entrySet()) {
// we assume (and check for at runtime) that all incoming
// batch connections have the same structure, so just taking
// the first connection here is sufficient to determine the
// output structure for any specified output connection type
String incomingConnType = e.getKey();
Instances incomingStruc =
getStepManager().getIncomingStructureFromStep(e.getValue().get(0),
incomingConnType);
return incomingStruc;
}
}
return null;
}
  /**
   * Main processing routine. On the first call after a reset, configures the
   * step for either streaming or batch mode (based on the incoming connection
   * types), parses the expression and binds it to the incoming structure.
   * Subsequent calls route data through processStreaming() or processBatch().
   *
   * @param data incoming data object
   * @throws WekaException if a problem occurs
   */
  @Override
  public void processIncoming(Data data) throws WekaException {
    if (m_isReset) {
      m_isReset = false;
      if (getStepManager().numIncomingConnectionsOfType(
        StepManager.CON_INSTANCE) > 0) {
        // streaming mode - a single reusable Data object carries each instance
        m_streamingData = new Data(StepManager.CON_INSTANCE);
        Instance inst = data.getPrimaryPayload();
        m_incomingStructure = new Instances(inst.dataset(), 0);
      } else {
        // batch mode - keep a header-only copy of the structure and count
        // down one slot per incoming batch connection
        m_incomingStructure = data.getPrimaryPayload();
        m_incomingStructure = new Instances(m_incomingStructure, 0);
        m_batchCount =
          new AtomicInteger(getStepManager().numIncomingConnections());
        getStepManager().processing();
      }
      // parse the internal representation of the expression and resolve
      // attribute references against the incoming structure
      m_root = new BracketNode();
      m_root.parseFromInternal(m_expressionString);
      m_root.init(m_incomingStructure, getStepManager()
        .getExecutionEnvironment().getEnvironmentVariables());
    }
    if (m_streamingData == null) {
      // processing batches - all batches must share the initial structure
      Instances batch = data.getPrimaryPayload();
      if (!m_incomingStructure.equalHeaders(batch)) {
        throw new WekaException("Incoming batches with different structure: "
          + m_incomingStructure.equalHeadersMsg(batch));
      }
      processBatch(data);
      if (isStopRequested()) {
        getStepManager().interrupted();
      } else if (m_batchCount.get() == 0) {
        // all expected batches have been seen
        getStepManager().finished();
      }
    } else {
      // process streaming
      processStreaming(data);
      if (isStopRequested()) {
        getStepManager().interrupted();
      }
    }
  }
  /**
   * Handles streaming "instance" connections. Each incoming instance is
   * evaluated against the expression and routed to the configured "true" or
   * "false" step (if that step is validly connected).
   *
   * @param data incoming data object encapsulating an instance to process
   * @throws WekaException if a problem occurs
   */
  protected void processStreaming(Data data) throws WekaException {
    if (getStepManager().isStreamFinished(data)) {
      // propagate end-of-stream marker downstream
      m_streamingData.clearPayload();
      getStepManager().throughputFinished(m_streamingData);
      return;
    }
    getStepManager().throughputUpdateStart();
    Instance toProcess = data.getPrimaryPayload();
    boolean result = m_root.evaluate(toProcess, true);
    m_streamingData.setPayloadElement(StepManager.CON_INSTANCE, toProcess);
    if (result) {
      if (m_validTrueStep) {
        getStepManager().outputData(StepManager.CON_INSTANCE,
          m_customNameOfTrueStep, m_streamingData);
      }
    } else {
      if (m_validFalseStep) {
        getStepManager().outputData(StepManager.CON_INSTANCE,
          m_customNameOfFalseStep, m_streamingData);
      }
    }
    getStepManager().throughputUpdateEnd();
  }
/**
* Processes batch data (dataset, training or test) connections.
*
* @param data the data object to process
* @throws WekaException if a problem occurs
*/
protected void processBatch(Data data) throws WekaException {
Instances incoming = data.getPrimaryPayload();
Instances trueBatch = new Instances(incoming, 0);
Instances falseBatch = new Instances(incoming, 0);
for (int i = 0; i < incoming.numInstances(); i++) {
if (isStopRequested()) {
return;
}
Instance current = incoming.instance(i);
boolean result = m_root.evaluate(current, true);
if (result) {
if (m_validTrueStep) {
trueBatch.add(current);
}
} else {
if (m_validFalseStep) {
falseBatch.add(current);
}
}
}
Integer setNum =
data.getPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, 1);
Integer maxSetNum =
data.getPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM, 1);
if (m_validTrueStep) {
getStepManager().logDetailed(
"Routing " + trueBatch.numInstances() + " instances to step "
+ m_customNameOfTrueStep);
Data outputData = new Data(data.getConnectionName(), trueBatch);
outputData.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, setNum);
outputData.setPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM,
maxSetNum);
getStepManager().outputData(data.getConnectionName(),
m_customNameOfTrueStep, outputData);
}
if (m_validFalseStep) {
getStepManager().logDetailed(
"Routing " + falseBatch.numInstances() + " instances to step "
+ m_customNameOfFalseStep);
Data outputData = new Data(data.getConnectionName(), falseBatch);
outputData.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, setNum);
outputData.setPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM,
maxSetNum);
getStepManager().outputData(data.getConnectionName(),
m_customNameOfFalseStep, outputData);
}
if (setNum == maxSetNum) {
m_batchCount.decrementAndGet();
}
}
  /**
   * Return the fully qualified name of a custom editor component (JComponent)
   * to use for editing the properties of the step. This method can return null,
   * in which case the system will dynamically generate an editor using the
   * GenericObjectEditor
   *
   * @return the fully qualified name of a step editor component
   */
  @Override
  public String getCustomEditorForStep() {
    // custom dialog for interactively building the boolean expression
    return "weka.gui.knowledgeflow.steps.FlowByExpressionStepEditorDialog";
  }
  /**
   * Abstract base class for parts of a boolean expression. Concrete subclasses
   * are BracketNode (a parenthesized group of nodes) and ExpressionClause (a
   * single attribute comparison).
   *
   * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
   */
  public static abstract class ExpressionNode implements Serializable {

    /** For serialization */
    private static final long serialVersionUID = -8427857202322768762L;

    /** boolean operator for combining with result so far (true = OR, false = AND) */
    protected boolean m_isAnOr;

    /** is this node negated? */
    protected boolean m_isNegated;

    /** Environment variables */
    protected transient Environment m_env;

    /** Whether to show the combination operator in the textual representation */
    protected boolean m_showAndOr = true;

    /**
     * Set whether this node is to be OR'ed to the result so far
     *
     * @param isOr true if this node is to be OR'd
     */
    public void setIsOr(boolean isOr) {
      m_isAnOr = isOr;
    }

    /**
     * Get whether this node is to be OR'ed
     *
     * @return true if this node is to be OR'ed with the result so far
     */
    public boolean isOr() {
      return m_isAnOr;
    }

    /**
     * Get whether this node is negated.
     *
     * @return true if this node is negated
     */
    public boolean isNegated() {
      return m_isNegated;
    }

    /**
     * Set whether this node is negated
     *
     * @param negated true if this node is negated
     */
    public void setNegated(boolean negated) {
      m_isNegated = negated;
    }

    /**
     * Set whether to show the combination operator in the textual description
     *
     * @param show true if the combination operator is to be shown
     */
    public void setShowAndOr(boolean show) {
      m_showAndOr = show;
    }

    /**
     * Initialize the node. Subclasses that reference attributes override this
     * to resolve attribute names/indices against the supplied structure.
     *
     * @param structure the structure of the incoming instances
     * @param env Environment variables
     */
    public void init(Instances structure, Environment env) {
      m_env = env;
    }

    /**
     * Evaluate this node and combine with the result so far
     *
     * @param inst the incoming instance to evalute with
     * @param result the result to combine with
     * @return the result after combining with this node
     */
    public abstract boolean evaluate(Instance inst, boolean result);

    /**
     * Get the internal (parseable) representation of this node
     *
     * @param buff the string buffer to append to
     */
    public abstract void toStringInternal(StringBuffer buff);

    /**
     * Get the display (human-readable) representation of this node
     *
     * @param buff the string buffer to append to
     */
    public abstract void toStringDisplay(StringBuffer buff);

    /**
     * Parse and initialize from the internal representation
     *
     * @param expression the expression to parse in internal representation
     * @return the remaining parts of the expression after parsing and removing
     *         the part for this node
     */
    protected abstract String parseFromInternal(String expression);

    /**
     * Get a DefaultMutableTreeNode for this node
     *
     * @param parent the parent of this node (if any)
     * @return the DefaultMutableTreeNode for this node
     */
    public abstract DefaultMutableTreeNode
      toJTree(DefaultMutableTreeNode parent);
  }
  /**
   * An expression node that encloses other expression nodes in brackets.
   * Children are combined left-to-right using each child's own AND/OR
   * operator, and the bracketed result may itself be negated.
   *
   * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
   */
  public static class BracketNode extends ExpressionNode implements
    Serializable {

    /** For serialization */
    private static final long serialVersionUID = 8732159083173001115L;

    /** The child nodes enclosed by this pair of brackets */
    protected List<ExpressionNode> m_children = new ArrayList<ExpressionNode>();

    @Override
    public void init(Instances structure, Environment env) {
      super.init(structure, env);

      // propagate initialization to all children
      for (ExpressionNode n : m_children) {
        n.init(structure, env);
      }
    }

    @Override
    public boolean evaluate(Instance inst, boolean result) {
      // children fold left-to-right starting from true; an empty bracket
      // node contributes true (i.e. is neutral under AND)
      boolean thisNode = true;
      if (m_children.size() > 0) {
        for (ExpressionNode n : m_children) {
          thisNode = n.evaluate(inst, thisNode);
        }

        if (isNegated()) {
          thisNode = !thisNode;
        }
      }

      return (isOr() ? (result || thisNode) : (result && thisNode));
    }

    /**
     * Add a child to this bracket node
     *
     * @param child the ExpressionNode to add
     */
    public void addChild(ExpressionNode child) {
      m_children.add(child);

      // the first child never displays its AND/OR operator
      if (m_children.size() > 0) {
        m_children.get(0).setShowAndOr(false);
      }
    }

    /**
     * Remove a child from this bracket node
     *
     * @param child the ExpressionNode to remove
     */
    public void removeChild(ExpressionNode child) {
      m_children.remove(child);

      // re-establish the "first child shows no operator" invariant
      if (m_children.size() > 0) {
        m_children.get(0).setShowAndOr(false);
      }
    }

    @Override
    public String toString() {
      // just the representation of this node (suitable for the abbreviated
      // JTree node label
      String result = "( )";

      if (isNegated()) {
        result = "!" + result;
      }

      if (m_showAndOr) {
        if (m_isAnOr) {
          result = "|| " + result;
        } else {
          result = "&& " + result;
        }
      }

      return result;
    }

    @Override
    public DefaultMutableTreeNode toJTree(DefaultMutableTreeNode parent) {
      DefaultMutableTreeNode current = new DefaultMutableTreeNode(this);
      if (parent != null) {
        parent.add(current);
      }

      for (ExpressionNode child : m_children) {
        child.toJTree(current);
      }

      return current;
    }

    private void toString(StringBuffer buff, boolean internal) {
      // NOTE(review): size() >= 0 is always true, so brackets are emitted
      // even for an empty node - presumably intentional (an empty "()" is
      // still meaningful in the serialized form), but confirm
      if (m_children.size() >= 0) {
        if (internal || m_showAndOr) {
          if (m_isAnOr) {
            buff.append("|| ");
          } else {
            buff.append("&& ");
          }
        }

        if (isNegated()) {
          buff.append("!");
        }

        buff.append("(");

        int count = 0;
        for (ExpressionNode child : m_children) {
          if (internal) {
            child.toStringInternal(buff);
          } else {
            child.toStringDisplay(buff);
          }
          count++;
          if (count != m_children.size()) {
            buff.append(" ");
          }
        }
        buff.append(")");
      }
    }

    @Override
    public void toStringDisplay(StringBuffer buff) {
      toString(buff, false);
    }

    @Override
    public void toStringInternal(StringBuffer buff) {
      toString(buff, true);
    }

    @Override
    public String parseFromInternal(String expression) {
      // leading 3-char operator prefix ("|| " or "&& ") - the internal
      // representation always writes one for every node
      if (expression.startsWith("|| ")) {
        m_isAnOr = true;
      }

      if (expression.startsWith("|| ") || expression.startsWith("&& ")) {
        expression = expression.substring(3, expression.length());
      }

      if (expression.charAt(0) == '!') {
        setNegated(true);
        expression = expression.substring(1, expression.length());
      }

      if (expression.charAt(0) != '(') {
        throw new IllegalArgumentException(
          "Malformed expression! Was expecting a \"(\"");
      }

      expression = expression.substring(1, expression.length());

      while (expression.charAt(0) != ')') {
        // peek past the child's 3-char operator prefix to see whether the
        // child is a nested bracket or a clause.
        // NOTE(review): a negated nested bracket ("&& !(") has '!' at this
        // offset and would be routed to ExpressionClause - confirm negated
        // nested brackets are not produced by the internal representation
        int offset = 3;

        if (expression.charAt(offset) == '(') {
          ExpressionNode child = new BracketNode();
          expression = child.parseFromInternal(expression);
          m_children.add(child);
        } else {
          // must be an ExpressionClause
          ExpressionNode child = new ExpressionClause();
          expression = child.parseFromInternal(expression);
          m_children.add(child);
        }
      }

      if (m_children.size() > 0) {
        m_children.get(0).setShowAndOr(false);
      }

      return expression;
    }
  }
  /**
   * An expression node that represents a single clause of an expression: a
   * comparison between a left-hand-side attribute and a right-hand-side
   * operand (either a constant or another attribute), using one of the
   * operators defined in ExpressionType.
   *
   * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
   */
  public static class ExpressionClause extends ExpressionNode implements
    Serializable {

    /** For serialization */
    private static final long serialVersionUID = 2754006654981248325L;

    /** The operator for this expression */
    protected ExpressionType m_operator;

    /** The name of the lhs attribute */
    protected String m_lhsAttributeName;

    /** The index of the lhs attribute */
    protected int m_lhsAttIndex = -1;

    /** The rhs operand (constant value or attribute name) */
    protected String m_rhsOperand;

    /** True if the rhs operand is an attribute */
    protected boolean m_rhsIsAttribute;

    /** index of the rhs if it is an attribute */
    protected int m_rhsAttIndex = -1;

    /** The name of the lhs attribute after resolving variables */
    protected String m_resolvedLhsName;

    /** The rhs operand after resolving variables */
    protected String m_resolvedRhsOperand;

    /** the compiled regex pattern (if the operator is REGEX) */
    protected Pattern m_regexPattern;

    /** The rhs operand (if constant and is a number ) */
    protected double m_numericOperand;

    /**
     * The comparison operators. Each constant implements its own evaluate()
     * dispatch; the display string doubles as the token used in the internal
     * (serialized) representation.
     */
    public static enum ExpressionType {
      EQUALS(" = ") {
        @Override
        boolean evaluate(Instance inst, int lhsAttIndex, String rhsOperand,
          double numericOperand, Pattern regexPattern, boolean rhsIsAttribute,
          int rhsAttIndex) {

          if (rhsIsAttribute) {
            // two missing values compare equal; one missing does not
            if (inst.isMissing(lhsAttIndex) && inst.isMissing(rhsAttIndex)) {
              return true;
            }
            if (inst.isMissing(lhsAttIndex) || inst.isMissing(rhsAttIndex)) {
              return false;
            }
            return Utils.eq(inst.value(lhsAttIndex), inst.value(rhsAttIndex));
          }

          if (inst.isMissing(lhsAttIndex)) {
            return false;
          }

          return (Utils.eq(inst.value(lhsAttIndex), numericOperand));
        }
      },
      NOTEQUAL(" != ") {
        @Override
        boolean evaluate(Instance inst, int lhsAttIndex, String rhsOperand,
          double numericOperand, Pattern regexPattern, boolean rhsIsAttribute,
          int rhsAttIndex) {
          // defined as the complement of EQUALS
          return !EQUALS.evaluate(inst, lhsAttIndex, rhsOperand,
            numericOperand, regexPattern, rhsIsAttribute, rhsAttIndex);
        }
      },
      LESSTHAN(" < ") {
        @Override
        boolean evaluate(Instance inst, int lhsAttIndex, String rhsOperand,
          double numericOperand, Pattern regexPattern, boolean rhsIsAttribute,
          int rhsAttIndex) {
          if (rhsIsAttribute) {
            if (inst.isMissing(lhsAttIndex) || inst.isMissing(rhsAttIndex)) {
              return false;
            }
            return (inst.value(lhsAttIndex) < inst.value(rhsAttIndex));
          }

          if (inst.isMissing(lhsAttIndex)) {
            return false;
          }

          return (inst.value(lhsAttIndex) < numericOperand);
        }
      },
      LESSTHANEQUAL(" <= ") {
        @Override
        boolean evaluate(Instance inst, int lhsAttIndex, String rhsOperand,
          double numericOperand, Pattern regexPattern, boolean rhsIsAttribute,
          int rhsAttIndex) {
          if (rhsIsAttribute) {
            if (inst.isMissing(lhsAttIndex) || inst.isMissing(rhsAttIndex)) {
              return false;
            }
            return (inst.value(lhsAttIndex) <= inst.value(rhsAttIndex));
          }

          if (inst.isMissing(lhsAttIndex)) {
            return false;
          }

          return (inst.value(lhsAttIndex) <= numericOperand);
        }
      },
      GREATERTHAN(" > ") {
        @Override
        boolean evaluate(Instance inst, int lhsAttIndex, String rhsOperand,
          double numericOperand, Pattern regexPattern, boolean rhsIsAttribute,
          int rhsAttIndex) {
          // complement of <= ; note that missing values therefore evaluate
          // to true here (LESSTHANEQUAL returns false for missing)
          return !LESSTHANEQUAL.evaluate(inst, lhsAttIndex, rhsOperand,
            numericOperand, regexPattern, rhsIsAttribute, rhsAttIndex);
        }
      },
      GREATERTHANEQUAL(" >= ") {
        @Override
        boolean evaluate(Instance inst, int lhsAttIndex, String rhsOperand,
          double numericOperand, Pattern regexPattern, boolean rhsIsAttribute,
          int rhsAttIndex) {
          // complement of <
          return !LESSTHAN.evaluate(inst, lhsAttIndex, rhsOperand,
            numericOperand, regexPattern, rhsIsAttribute, rhsAttIndex);
        }
      },
      ISMISSING(" isMissing ") {
        @Override
        boolean evaluate(Instance inst, int lhsAttIndex, String rhsOperand,
          double numericOperand, Pattern regexPattern, boolean rhsIsAttribute,
          int rhsAttIndex) {
          // unary operator - rhs arguments are ignored
          return (inst.isMissing(lhsAttIndex));
        }
      },
      CONTAINS(" contains ") {
        @Override
        boolean evaluate(Instance inst, int lhsAttIndex, String rhsOperand,
          double numericOperand, Pattern regexPattern, boolean rhsIsAttribute,
          int rhsAttIndex) {
          if (inst.isMissing(lhsAttIndex)) {
            return false;
          }

          String lhsString = "";
          try {
            // stringValue() throws for purely numeric attributes - treat
            // that as a non-match rather than an error
            lhsString = inst.stringValue(lhsAttIndex);
          } catch (IllegalArgumentException ex) {
            return false;
          }

          if (rhsIsAttribute) {
            if (inst.isMissing(rhsAttIndex)) {
              return false;
            }
            try {
              String rhsString = inst.stringValue(rhsAttIndex);
              return lhsString.contains(rhsString);
            } catch (IllegalArgumentException ex) {
              return false;
            }
          }

          return lhsString.contains(rhsOperand);
        }
      },
      STARTSWITH(" startsWith ") {
        @Override
        boolean evaluate(Instance inst, int lhsAttIndex, String rhsOperand,
          double numericOperand, Pattern regexPattern, boolean rhsIsAttribute,
          int rhsAttIndex) {
          if (inst.isMissing(lhsAttIndex)) {
            return false;
          }

          String lhsString = "";
          try {
            lhsString = inst.stringValue(lhsAttIndex);
          } catch (IllegalArgumentException ex) {
            return false;
          }

          if (rhsIsAttribute) {
            if (inst.isMissing(rhsAttIndex)) {
              return false;
            }
            try {
              String rhsString = inst.stringValue(rhsAttIndex);
              return lhsString.startsWith(rhsString);
            } catch (IllegalArgumentException ex) {
              return false;
            }
          }

          return lhsString.startsWith(rhsOperand);
        }
      },
      ENDSWITH(" endsWith ") {
        @Override
        boolean evaluate(Instance inst, int lhsAttIndex, String rhsOperand,
          double numericOperand, Pattern regexPattern, boolean rhsIsAttribute,
          int rhsAttIndex) {
          if (inst.isMissing(lhsAttIndex)) {
            return false;
          }

          String lhsString = "";
          try {
            lhsString = inst.stringValue(lhsAttIndex);
          } catch (IllegalArgumentException ex) {
            return false;
          }

          if (rhsIsAttribute) {
            if (inst.isMissing(rhsAttIndex)) {
              return false;
            }
            try {
              String rhsString = inst.stringValue(rhsAttIndex);
              return lhsString.endsWith(rhsString);
            } catch (IllegalArgumentException ex) {
              return false;
            }
          }

          return lhsString.endsWith(rhsOperand);
        }
      },
      REGEX(" regex ") {
        @Override
        boolean evaluate(Instance inst, int lhsAttIndex, String rhsOperand,
          double numericOperand, Pattern regexPattern, boolean rhsIsAttribute,
          int rhsAttIndex) {
          if (inst.isMissing(lhsAttIndex)) {
            return false;
          }

          // pattern is compiled once in init(); no pattern means no match
          if (regexPattern == null) {
            return false;
          }

          String lhsString = "";
          try {
            lhsString = inst.stringValue(lhsAttIndex);
          } catch (IllegalArgumentException ex) {
            return false;
          }

          return regexPattern.matcher(lhsString).matches();
        }
      };

      /**
       * Evaluate this operator for the given instance.
       *
       * @param inst the instance to evaluate against
       * @param lhsAttIndex index of the left-hand-side attribute
       * @param rhsOperand the right-hand-side operand as a string (used by
       *          the string-based operators)
       * @param numericOperand the right-hand-side operand as a number (used
       *          by the numeric operators when the rhs is a constant)
       * @param regexPattern compiled pattern (REGEX operator only)
       * @param rhsIsAttribute true if the rhs refers to an attribute
       * @param rhsAttIndex index of the rhs attribute (if applicable)
       * @return the boolean result of the comparison
       */
      abstract boolean evaluate(Instance inst, int lhsAttIndex,
        String rhsOperand, double numericOperand, Pattern regexPattern,
        boolean rhsIsAttribute, int rhsAttIndex);

      /** The display/serialization token for this operator */
      private final String m_stringVal;

      ExpressionType(String name) {
        m_stringVal = name;
      }

      @Override
      public String toString() {
        return m_stringVal;
      }
    }

    /** No-args constructor (used when parsing from the internal representation) */
    public ExpressionClause() {
    }

    /**
     * Construct a new ExpressionClause
     *
     * @param operator the operator to use
     * @param lhsAttributeName the lhs attribute name
     * @param rhsOperand the rhs operand
     * @param rhsIsAttribute true if the rhs operand is an attribute
     * @param isAnOr true if the result of this expression is to be OR'ed with
     *          the result so far
     */
    public ExpressionClause(ExpressionType operator, String lhsAttributeName,
      String rhsOperand, boolean rhsIsAttribute, boolean isAnOr) {
      m_operator = operator;
      m_lhsAttributeName = lhsAttributeName;
      m_rhsOperand = rhsOperand;
      m_rhsIsAttribute = rhsIsAttribute;
      m_isAnOr = isAnOr;
    }

    /**
     * Get the lhs attribute name
     *
     * @return the lhs attribute name
     */
    public String getLHSAttName() {
      return m_lhsAttributeName;
    }

    /**
     * Set the lhs attribute name
     *
     * @param attName the lhs att name
     */
    public void setLHSAttName(String attName) {
      m_lhsAttributeName = attName;
    }

    /**
     * Get the rhs operand
     *
     * @return the rhs operand
     */
    public String getRHSOperand() {
      return m_rhsOperand;
    }

    /**
     * Set the rhs operand
     *
     * @param opp the rhs operand to set
     */
    public void setRHSOperand(String opp) {
      m_rhsOperand = opp;
    }

    /**
     * Returns true if the RHS is an attribute rather than a constant
     *
     * @return true if the RHS is an attribute
     */
    public boolean isRHSAnAttribute() {
      return m_rhsIsAttribute;
    }

    /**
     * Set whether the RHS is an attribute rather than a constant
     *
     * @param rhs true if the RHS is an attribute rather than a constant
     */
    public void setRHSIsAnAttribute(boolean rhs) {
      m_rhsIsAttribute = rhs;
    }

    /**
     * Get the operator
     *
     * @return the operator
     */
    public ExpressionType getOperator() {
      return m_operator;
    }

    /**
     * Set the operator
     *
     * @param opp the operator to use
     */
    public void setOperator(ExpressionType opp) {
      m_operator = opp;
    }

    @Override
    public void init(Instances structure, Environment env) {
      super.init(structure, env);

      // resolve any environment variables in the lhs name and rhs operand
      m_resolvedLhsName = m_lhsAttributeName;
      m_resolvedRhsOperand = m_rhsOperand;
      try {
        m_resolvedLhsName = m_env.substitute(m_resolvedLhsName);
        m_resolvedRhsOperand = m_env.substitute(m_resolvedRhsOperand);
      } catch (Exception ex) {
        // best-effort substitution: fall back to the raw values on failure
      }

      Attribute lhs = null;
      // try as an index or "special" label first
      if (m_resolvedLhsName.toLowerCase().startsWith("/first")) {
        lhs = structure.attribute(0);
      } else if (m_resolvedLhsName.toLowerCase().startsWith("/last")) {
        lhs = structure.attribute(structure.numAttributes() - 1);
      } else {
        // try as an index (1-based in the user-facing form)
        try {
          int indx = Integer.parseInt(m_resolvedLhsName);
          indx--;
          lhs = structure.attribute(indx);
        } catch (NumberFormatException ex) {
          // ignore - fall through to name lookup
        }
      }

      if (lhs == null) {
        lhs = structure.attribute(m_resolvedLhsName);
      }
      if (lhs == null) {
        throw new IllegalArgumentException("Data does not contain attribute "
          + "\"" + m_resolvedLhsName + "\"");
      }
      m_lhsAttIndex = lhs.index();

      if (m_rhsIsAttribute) {
        Attribute rhs = null;

        // try as an index or "special" label first
        if (m_resolvedRhsOperand.toLowerCase().equals("/first")) {
          rhs = structure.attribute(0);
        } else if (m_resolvedRhsOperand.toLowerCase().equals("/last")) {
          rhs = structure.attribute(structure.numAttributes() - 1);
        } else {
          // try as an index (1-based in the user-facing form)
          try {
            int indx = Integer.parseInt(m_resolvedRhsOperand);
            indx--;
            rhs = structure.attribute(indx);
          } catch (NumberFormatException ex) {
            // ignore - fall through to name lookup
          }
        }

        if (rhs == null) {
          rhs = structure.attribute(m_resolvedRhsOperand);
        }
        if (rhs == null) {
          throw new IllegalArgumentException("Data does not contain attribute "
            + "\"" + m_resolvedRhsOperand + "\"");
        }
        m_rhsAttIndex = rhs.index();
      } else if (m_operator != ExpressionType.CONTAINS
        && m_operator != ExpressionType.STARTSWITH
        && m_operator != ExpressionType.ENDSWITH
        && m_operator != ExpressionType.REGEX
        && m_operator != ExpressionType.ISMISSING) {
        // make sure the operand is parseable as a number (unless missing has
        // been specified - equals only)
        if (lhs.isNominal()) {
          // for nominal attributes the constant is interpreted as a label;
          // its value index is used for the numeric comparison
          m_numericOperand = lhs.indexOfValue(m_resolvedRhsOperand);

          if (m_numericOperand < 0) {
            throw new IllegalArgumentException("Unknown nominal value '"
              + m_resolvedRhsOperand + "' for attribute '" + lhs.name() + "'");
          }
        } else {
          try {
            m_numericOperand = Double.parseDouble(m_resolvedRhsOperand);
          } catch (NumberFormatException e) {
            throw new IllegalArgumentException("\"" + m_resolvedRhsOperand
              + "\" is not parseable as a number!");
          }
        }
      }

      if (m_operator == ExpressionType.REGEX) {
        m_regexPattern = Pattern.compile(m_resolvedRhsOperand);
      }
    }

    @Override
    public boolean evaluate(Instance inst, boolean result) {
      // NOTE(review): this passes the raw m_rhsOperand (not
      // m_resolvedRhsOperand) to the operator, so environment variables in a
      // string-valued RHS are not substituted for the string-based operators
      // (contains/startsWith/endsWith) - confirm this is intended
      boolean thisNode =
        m_operator.evaluate(inst, m_lhsAttIndex, m_rhsOperand,
          m_numericOperand, m_regexPattern, m_rhsIsAttribute, m_rhsAttIndex);
      if (isNegated()) {
        thisNode = !thisNode;
      }

      return (isOr() ? (result || thisNode) : (result && thisNode));
    }

    @Override
    public String toString() {
      StringBuffer buff = new StringBuffer();
      toStringDisplay(buff);

      return buff.toString();
    }

    @Override
    public void toStringDisplay(StringBuffer buff) {
      toString(buff, false);
    }

    @Override
    public void toStringInternal(StringBuffer buff) {
      toString(buff, true);
    }

    @Override
    public DefaultMutableTreeNode toJTree(DefaultMutableTreeNode parent) {
      // clauses are leaves - just attach to the parent
      parent.add(new DefaultMutableTreeNode(this));

      return parent;
    }

    private void toString(StringBuffer buff, boolean internal) {
      // internal form uses "@EC@" as the field delimiter inside [...];
      // display form uses spaces
      if (internal || m_showAndOr) {
        if (m_isAnOr) {
          buff.append("|| ");
        } else {
          buff.append("&& ");
        }
      }

      if (isNegated()) {
        buff.append("!");
      }

      buff.append("[");
      buff.append(m_lhsAttributeName);
      if (internal) {
        buff.append("@EC@" + m_operator.toString());
      } else {
        buff.append(" " + m_operator.toString());
      }

      if (m_operator != ExpressionType.ISMISSING) {
        // @@ indicates that the rhs is an attribute
        if (internal) {
          buff.append("@EC@" + (m_rhsIsAttribute ? "@@" : "") + m_rhsOperand);
        } else {
          buff.append(" " + (m_rhsIsAttribute ? "ATT: " : "") + m_rhsOperand);
        }
      } else {
        if (internal) {
          buff.append("@EC@");
        } else {
          buff.append(" ");
        }
      }
      buff.append("]");
    }

    @Override
    protected String parseFromInternal(String expression) {
      // first the boolean operator for this clause
      if (expression.startsWith("|| ")) {
        m_isAnOr = true;
      }

      if (expression.startsWith("|| ") || expression.startsWith("&& ")) {
        // strip the boolean operator
        expression = expression.substring(3, expression.length());
      }

      if (expression.charAt(0) == '!') {
        setNegated(true);
        expression = expression.substring(1, expression.length());
      }

      if (expression.charAt(0) != '[') {
        throw new IllegalArgumentException(
          "Was expecting a \"[\" to start this ExpressionClause!");
      }

      expression = expression.substring(1, expression.length());
      // lhs attribute name up to the first "@EC@" delimiter
      m_lhsAttributeName = expression.substring(0, expression.indexOf("@EC@"));
      expression =
        expression.substring(expression.indexOf("@EC@") + 4,
          expression.length());

      // operator token up to the next "@EC@" delimiter
      String oppName = expression.substring(0, expression.indexOf("@EC@"));
      expression =
        expression.substring(expression.indexOf("@EC@") + 4,
          expression.length());
      for (ExpressionType n : ExpressionType.values()) {
        if (n.toString().equals(oppName)) {
          m_operator = n;
          break;
        }
      }

      if (expression.startsWith("@@")) {
        // rhs is an attribute
        expression = expression.substring(2, expression.length()); // strip off
                                                                   // "@@"
        m_rhsIsAttribute = true;
      }

      m_rhsOperand = expression.substring(0, expression.indexOf(']'));
      expression =
        expression.substring(expression.indexOf(']') + 1, expression.length()); // remove
                                                                                // "]"
      if (expression.charAt(0) == ' ') {
        expression = expression.substring(1, expression.length());
      }

      return expression;
    }
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/GetDataFromResult.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* GetDataFromResult.java
* Copyright (C) 2016 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.WekaException;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.JobEnvironment;
import weka.knowledgeflow.StepManager;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Step that outputs data stored in the job environment
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision: $
 */
@KFStep(name = "GetDataFromResult", category = "Flow",
  toolTipText = "Output data stored in the job environment",
  iconPath = KFGUIConsts.BASE_ICON_PATH + "GetDataFromResult.gif")
public class GetDataFromResult extends BaseStep {

  /** For serialization */
  private static final long serialVersionUID = 7447845310997458636L;

  /**
   * Initialize the step - nothing to do in this case.
   *
   * @throws WekaException if a problem occurs
   */
  @Override
  public void stepInit() throws WekaException {
  }

  /**
   * Start executing as a potential start point. Only applies when there are
   * no incoming connections and the execution environment's variables are a
   * JobEnvironment, in which case the job's result data is output directly.
   *
   * @throws WekaException if a problem occurs
   */
  @Override
  public void start() throws WekaException {
    if (getStepManager().numIncomingConnections() == 0
      && getStepManager().getExecutionEnvironment().getEnvironmentVariables() instanceof JobEnvironment) {
      JobEnvironment jobEnvironment =
        (JobEnvironment) getStepManager().getExecutionEnvironment()
          .getEnvironmentVariables();
      outputDataFromResult(jobEnvironment.getResultData());
    }
  }

  /**
   * Process an incoming data payload: extract the environment results map
   * carried by the payload and output its data.
   *
   * @param data the payload to process
   * @throws WekaException if a problem occurs
   */
  @SuppressWarnings("unchecked")
  @Override
  public void processIncoming(Data data) throws WekaException {
    outputDataFromResult((Map<String, LinkedHashSet<Data>>) data
      .getPayloadElement(StepManager.CON_AUX_DATA_ENVIRONMENT_RESULTS));
  }

  /**
   * Output the data stored in the supplied results map. Only connection types
   * for which this step actually has outgoing connections are emitted; types
   * with no stored results are logged and skipped.
   *
   * @param results map from connection type to the stored Data objects
   * @throws WekaException if a problem occurs
   */
  protected void outputDataFromResult(Map<String, LinkedHashSet<Data>> results)
    throws WekaException {
    if (results != null && results.size() > 0) {
      getStepManager().processing();
      Set<String> outConns = getStepManager().getOutgoingConnections().keySet();
      for (String conn : outConns) {
        LinkedHashSet<Data> connData = results.get(conn);
        if (connData != null) {
          for (Data d : connData) {
            getStepManager().outputData(d);
          }
        } else {
          getStepManager().logBasic(
            "No results of type '" + conn + "' in job " + "environment");
        }
      }
    } else {
      getStepManager().logBasic("No results to output from job environment");
    }
    getStepManager().finished();
  }

  /**
   * Get a list of incoming connection types that this step can accept. A
   * single jobSuccess connection is accepted when none exists yet.
   *
   * @return a list of acceptable incoming connection types
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    if (getStepManager().numIncomingConnections() == 0) {
      return Arrays.asList(StepManager.CON_JOB_SUCCESS);
    }

    return null;
  }

  /**
   * Get a list of outgoing connection types that this step can produce -
   * i.e. all the result connection types that a job environment can store.
   *
   * @return a list of outgoing connection types
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    return Arrays.asList(StepManager.CON_DATASET, StepManager.CON_TRAININGSET,
      StepManager.CON_TESTSET, StepManager.CON_BATCH_CLASSIFIER,
      StepManager.CON_BATCH_CLUSTERER, StepManager.CON_BATCH_ASSOCIATOR,
      StepManager.CON_TEXT, StepManager.CON_IMAGE);
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/GraphViewer.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* GraphViewer
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import weka.core.WekaException;
import weka.gui.knowledgeflow.StepVisual;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
/**
* Step for collecting and visualizing graph output from Drawable schemes.
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(name = "GraphViewer", category = "Visualization",
  toolTipText = "Visualize graph output from Drawable schemes",
  iconPath = StepVisual.BASE_ICON_PATH + "DefaultGraph.gif")
public class GraphViewer extends BaseSimpleDataVisualizer {

  private static final long serialVersionUID = -3256888744740965144L;

  /**
   * Get the incoming connection types this step accepts - graph connections
   * only.
   *
   * @return a list containing only StepManager.CON_GRAPH
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    return Arrays.asList(StepManager.CON_GRAPH);
  }

  /**
   * Get the outgoing connection types this step can produce - a text
   * connection (carrying the graph description), available only once at
   * least one incoming connection is present.
   *
   * @return a list containing StepManager.CON_TEXT, or null when there are
   *         no incoming connections yet
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    if (getStepManager().numIncomingConnections() > 0) {
      return Arrays.asList(StepManager.CON_TEXT);
    }
    return null;
  }

  /**
   * Store an incoming graph for later viewing and forward its textual
   * representation to any downstream text connections.
   *
   * @param data the payload containing the graph
   * @throws WekaException if a problem occurs
   */
  @Override
  public void processIncoming(Data data) throws WekaException {
    getStepManager().processing();
    String title = data.getPayloadElement(StepManager.CON_AUX_DATA_GRAPH_TITLE);
    getStepManager().logDetailed(title);
    m_data.add(data);

    // make the graph description available as text output too
    Data textOutput = new Data(StepManager.CON_TEXT, data.getPrimaryPayload());
    textOutput.setPayloadElement(StepManager.CON_AUX_DATA_TEXT_TITLE, title);
    getStepManager().outputData(textOutput);
    getStepManager().finished();
  }

  /**
   * Get the interactive viewers offered by this step. The graph viewer is
   * only made available once some graph data has actually been collected.
   *
   * @return a map of viewer action names to fully qualified viewer class
   *         names
   */
  @Override
  public Map<String, String> getInteractiveViewers() {
    Map<String, String> views = new LinkedHashMap<String, String>();
    if (!m_data.isEmpty()) {
      views.put("Show plots",
        "weka.gui.knowledgeflow.steps.GraphViewerInteractiveView");
    }
    return views;
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/ImageSaver.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* ImageSaver.java
*
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*/
package weka.knowledgeflow.steps;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import javax.imageio.ImageIO;
import weka.core.Defaults;
import weka.core.OptionMetadata;
import weka.core.Settings;
import weka.core.WekaException;
import weka.gui.FilePropertyMetadata;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
/**
* Step for saving static images as either png or gif.
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(name = "ImageSaver", category = "DataSinks",
  toolTipText = "Save static images to a file",
  iconPath = KFGUIConsts.BASE_ICON_PATH + "SerializedModelSaver.gif")
public class ImageSaver extends BaseStep {

  private static final long serialVersionUID = -8766164679635957891L;

  /**
   * Supported image output formats. DEFAULT is a placeholder meaning "use
   * whatever format is configured in this step's settings".
   */
  protected static enum ImageFormat {
    DEFAULT, PNG, GIF
  };

  /** The file to save to */
  protected File m_file = new File("");

  /** Default location to write to, in case a file has not been explicitly set */
  protected String m_defaultFile = "";

  /**
   * Format to save to. If set to DEFAULT, then the default format the user has
   * set in the settings for this step is used.
   */
  protected ImageFormat m_format = ImageFormat.DEFAULT;

  /**
   * Default format to use - read from the settings for this step, and used in
   * the case when the user has selected/left DEFAULT as the format type in the
   * step's options. Must not be set to the type DEFAULT of course :-)
   */
  protected ImageFormat m_defaultFormat;

  /**
   * Gets incremented by 1 for each image received during execution. Can be used
   * (via the image_count variable) to ensure that each image gets saved to a
   * different file when there are multiple images expected during execution.
   */
  protected int m_imageCounter;

  /**
   * Set the file to save to
   *
   * @param f the file to save to
   */
  @OptionMetadata(
    displayName = "File to save to",
    description = "<html>The file to save an image to<br>The variable 'image_count' may be "
      + "used as<br>part of the filename/path in order to differentiate<br>"
      + "multiple images.</html>", displayOrder = 1)
  @FilePropertyMetadata(fileChooserDialogType = KFGUIConsts.OPEN_DIALOG,
    directoriesOnly = false)
  public void setFile(File f) {
    m_file = f;
  }

  /**
   * Get the file to save to
   *
   * @return the file to save to
   */
  public File getFile() {
    return m_file;
  }

  /**
   * Set the format of the image to save
   *
   * @param format the format to save the image in
   */
  @OptionMetadata(displayName = "Format to save image as",
    description = "Format to save to", displayOrder = 2)
  public void setFormat(ImageFormat format) {
    m_format = format;
  }

  /**
   * Get the format of the image to save
   *
   * @return the format of the image to save
   */
  public ImageFormat getFormat() {
    return m_format;
  }

  /**
   * Initialize the step: reset the image counter and resolve the default
   * file and concrete image format from the step's settings where necessary.
   *
   * @throws WekaException if the settings supply DEFAULT as the default
   *           format (DEFAULT is only a placeholder, not a writable format)
   */
  @Override
  public void stepInit() throws WekaException {
    m_imageCounter = 1;
    m_defaultFile = getFile().toString();
    if (m_defaultFile == null || m_defaultFile.length() == 0) {
      // no file explicitly configured on the step - fall back to the file
      // stored in the application settings (environment vars are resolved)
      File defaultF =
        getStepManager().getSettings().getSetting(ImageSaverDefaults.ID,
          ImageSaverDefaults.DEFAULT_FILE_KEY, ImageSaverDefaults.DEFAULT_FILE,
          getStepManager().getExecutionEnvironment().getEnvironmentVariables());
      m_defaultFile = defaultF.toString();
    }
    if (m_format == ImageFormat.DEFAULT) {
      // resolve the concrete format to write from the settings
      m_defaultFormat =
        getStepManager().getSettings().getSetting(ImageSaverDefaults.ID,
          ImageSaverDefaults.DEFAULT_FORMAT_KEY,
          ImageSaverDefaults.DEFAULT_FORMAT,
          getStepManager().getExecutionEnvironment().getEnvironmentVariables());
      if (m_defaultFormat == ImageFormat.DEFAULT) {
        throw new WekaException("The default format to use must be something "
          + "other than 'DEFAULT'");
      }
    }
  }

  /**
   * Get a list of incoming connection types that this step can accept - image
   * connections only.
   *
   * @return a list of incoming connections that this step can accept given its
   *         current state
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    return Arrays.asList(StepManager.CON_IMAGE);
  }

  /**
   * Get a list of outgoing connection types that this step can produce. This
   * step is a sink and produces no outgoing connections.
   *
   * @return null (no outgoing connections)
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    return null;
  }

  /**
   * Process an incoming image payload by writing the image to the configured
   * (or default) file in the resolved format.
   *
   * @param data the payload to process
   * @throws WekaException if writing the image fails
   */
  @Override
  public synchronized void processIncoming(Data data) throws WekaException {
    getStepManager().processing();
    ImageFormat formatToUse =
      m_format == ImageFormat.DEFAULT ? m_defaultFormat : m_format;
    BufferedImage content = data.getPrimaryPayload();
    // expose the running image count as an environment variable so that
    // filenames can be made unique when multiple images arrive in one run
    getStepManager().getExecutionEnvironment().getEnvironmentVariables()
      .addVariable("image_count", "" + m_imageCounter++);
    String fileName = getFile().toString();
    if (fileName == null || fileName.length() == 0) {
      fileName = m_defaultFile;
    }
    // substitute environment variables (including image_count) in the path
    fileName = environmentSubstitute(fileName);
    if (!(new File(fileName)).isDirectory()) {
      // append the format name as a file extension if not already present
      if (!fileName.toLowerCase()
        .endsWith(formatToUse.toString().toLowerCase())) {
        fileName += "." + formatToUse.toString().toLowerCase();
      }
      File file = new File(fileName);
      getStepManager().logDetailed("Writing image to " + fileName);
      try {
        ImageIO.write(content, formatToUse.toString().toLowerCase(), file);
      } catch (IOException ex) {
        throw new WekaException(ex);
      }
    } else {
      getStepManager().logWarning(
        "Unable to write image because '" + fileName + "' is a directory!");
    }
    if (!isStopRequested()) {
      getStepManager().finished();
    } else {
      getStepManager().interrupted();
    }
  }

  /**
   * Get default settings for the step.
   *
   * @return the default settings
   */
  @Override
  public Defaults getDefaultSettings() {
    return new ImageSaverDefaults();
  }

  /**
   * Default settings (file and format) for the ImageSaver step, editable via
   * the Knowledge Flow's settings mechanism.
   */
  public static final class ImageSaverDefaults extends Defaults {

    public static final String ID = "weka.knowledgeflow.steps.imagesaver";

    /** Settings key for the default file to save to */
    public static final Settings.SettingKey DEFAULT_FILE_KEY =
      new Settings.SettingKey(ID + ".defaultFile", "Default file to save to",
        "Save to this file if the user has "
          + "not explicitly set one in the step");
    public static final File DEFAULT_FILE = new File("${user.dir}/image");

    /** Settings key for the default image format to write */
    public static final Settings.SettingKey DEFAULT_FORMAT_KEY =
      new Settings.SettingKey(ID + ".defaultFormat", "Default image format to "
        + "write", "Default image format to write in the case that the user "
        + "has explicitly set 'DEFAULT' in the step's options");
    public static final ImageFormat DEFAULT_FORMAT = ImageFormat.PNG;

    private static final long serialVersionUID = -2739579935119189195L;

    public ImageSaverDefaults() {
      super(ID);
      m_defaults.put(DEFAULT_FILE_KEY, DEFAULT_FILE);
      m_defaults.put(DEFAULT_FORMAT_KEY, DEFAULT_FORMAT);
    }
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/ImageViewer.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* ImageViewer.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.WekaException;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
* A step for collecting and viewing image data
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(name = "ImageViewer", category = "Visualization",
  toolTipText = "View images", iconPath = KFGUIConsts.BASE_ICON_PATH
    + "StripChart.gif")
public class ImageViewer extends BaseStep implements DataCollector {

  private static final long serialVersionUID = -4055716444227948343L;

  /** Holds the received images, keyed by a timestamp-prefixed title */
  protected Map<String, BufferedImage> m_images =
    new LinkedHashMap<String, BufferedImage>();

  /**
   * Initialize the step. Nothing to do in the case of this step
   */
  @Override
  public void stepInit() {
    // nothing to do
  }

  /**
   * Get a list of acceptable incoming connections - only StepManager.CON_IMAGE
   * in this case
   *
   * @return a list of acceptable incoming connections
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    return Arrays.asList(StepManager.CON_IMAGE);
  }

  /**
   * Get a list of outgoing connections that can be generated given the current
   * state of the step - will produce StepManager.CON_IMAGE data if we have at
   * least one incoming image connection
   *
   * @return a list of outgoing connections
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    return getStepManager().numIncomingConnectionsOfType(StepManager.CON_IMAGE) > 0 ? Arrays
      .asList(StepManager.CON_IMAGE) : new ArrayList<String>();
  }

  /**
   * Process incoming image data: store the image under a timestamp-prefixed
   * title and pass the original payload on downstream.
   *
   * @param data the data to process
   * @throws WekaException if the payload does not contain an image
   */
  @Override
  public synchronized void processIncoming(Data data) throws WekaException {
    getStepManager().processing();
    String imageTitle =
      data.getPayloadElement(StepManager.CON_AUX_DATA_TEXT_TITLE);
    BufferedImage image = data.getPrimaryPayload();
    if (image == null) {
      throw new WekaException("Data does not seem to contain an image!");
    }

    // prefix with a timestamp so that repeated titles stay distinct keys
    String date = (new SimpleDateFormat("HH:mm:ss.SSS - ")).format(new Date());
    if (imageTitle != null) {
      imageTitle = date + imageTitle;
    } else {
      imageTitle = date + "Untitled";
    }

    m_images.put(imageTitle, image);
    getStepManager().logDetailed("Storing image: " + imageTitle);

    // pass on downstream
    getStepManager().outputData(data);
    getStepManager().finished();
  }

  /**
   * Get a map of named images that this step has collected
   *
   * @return a map of named images
   */
  public Map<String, BufferedImage> getImages() {
    return m_images;
  }

  /**
   * Gets a list of classes of viewers that can be popped up in the GUI
   * Knowledge Flow from this step, given that we have received and stored some
   * image data.
   *
   * @return a list of viewer classes
   */
  @Override
  public Map<String, String> getInteractiveViewers() {
    Map<String, String> views = new LinkedHashMap<String, String>();
    if (m_images.size() > 0) {
      views.put("Show images",
        "weka.gui.knowledgeflow.steps.ImageViewerInteractiveView");
    }
    return views;
  }

  /**
   * Retrieve the data stored in this step. This is a map of png image data (as
   * raw bytes), keyed by image name.
   *
   * @return the data stored in this step
   */
  @Override
  public Object retrieveData() {
    // As BufferedImage is not serializable, we need to store raw
    // png bytes in a map.
    return bufferedImageMapToSerializableByteMap(m_images);
  }

  /**
   * Restore data for this step. Argument is expected to be a map of png image
   * data (as raw bytes) keyed by image name
   *
   * @param data the data to set
   * @throws IllegalArgumentException if the argument is not a map
   * @throws WekaException if decoding the image bytes fails
   */
  @SuppressWarnings("unchecked")
  @Override
  public void restoreData(Object data) throws WekaException {
    if (!(data instanceof Map)) {
      throw new IllegalArgumentException("Argument for restoring data must "
        + "be a map");
    }
    try {
      m_images = byteArrayImageMapToBufferedImageMap((Map<String, byte[]>) data);
    } catch (IOException e) {
      throw new WekaException(e);
    }
  }

  /**
   * Utility method to convert a map of {@code byte[]} png image data to
   * a map of {@code BufferedImage}
   *
   * @param dataMap the map containing raw png byte arrays
   * @return a map of {@code BufferedImage}s
   * @throws IOException if a problem occurs
   */
  public static Map<String, BufferedImage> byteArrayImageMapToBufferedImageMap(
    Map<String, byte[]> dataMap) throws IOException {
    Map<String, BufferedImage> restored =
      new LinkedHashMap<String, BufferedImage>();
    // iterate the supplied map directly (the parameter is already correctly
    // typed; no cast needed). Entries whose bytes cannot be decoded as an
    // image are skipped - ImageIO.read returns null for unrecognized data.
    for (Map.Entry<String, byte[]> e : dataMap.entrySet()) {
      String title = e.getKey();
      byte[] png = e.getValue();
      ByteArrayInputStream bais = new ByteArrayInputStream(png);
      BufferedImage bi = ImageIO.read(bais);
      if (bi != null) {
        restored.put(title, bi);
      }
    }
    return restored;
  }

  /**
   * Utility method to convert a map of {@code BufferedImage} to a map of byte
   * arrays (that hold each image as png bytes)
   *
   * @param images the map of {@code BufferedImage}s to convert
   * @return a map of png byte arrays
   */
  public static Map<String, byte[]> bufferedImageMapToSerializableByteMap(
    Map<String, BufferedImage> images) {
    Map<String, byte[]> serializableMap = new LinkedHashMap<String, byte[]>();
    for (Map.Entry<String, BufferedImage> e : images.entrySet()) {
      String title = e.getKey();
      BufferedImage b = e.getValue();
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      try {
        ImageIO.write(b, "png", baos);
        serializableMap.put(title, baos.toByteArray());
      } catch (IOException ex) {
        // best effort: an image that fails to encode is logged to stderr and
        // omitted from the result rather than aborting the whole conversion
        ex.printStackTrace();
      }
    }
    return serializableMap;
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/IncrementalClassifierEvaluator.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* IncrementalClassifierEvaluator.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.classifiers.evaluation.Evaluation;
import weka.core.Instance;
import weka.core.Utils;
import weka.core.WekaException;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
/**
* Step that evaluates incremental classifiers and produces strip chart data
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(
  name = "IncrementalClassifierEvaluator",
  category = "Evaluation",
  toolTipText = "Evaluate the performance of incrementally training classifiers",
  iconPath = KFGUIConsts.BASE_ICON_PATH + "IncrementalClassifierEvaluator.gif")
public class IncrementalClassifierEvaluator extends BaseStep {

  private static final long serialVersionUID = -5951569492213633100L;

  /** Legend information for the chart output */
  protected List<String> m_dataLegend;

  /** Actual data point values */
  protected double[] m_dataPoint;

  /** Re-usable chart data */
  protected Data m_chartData = new Data(StepManager.CON_CHART);

  /** Minimum data point value seen so far (numeric class charting) */
  protected double m_min = Double.MAX_VALUE;

  /**
   * Maximum data point value seen so far (numeric class charting).
   * Initialized to -Double.MAX_VALUE (the most negative finite double).
   * Note: the previous initializer, Double.MIN_VALUE, is the smallest
   * POSITIVE double, so a stream of negative updates (e.g. negative
   * predictions for missing-class instances) could never raise the maximum.
   */
  protected double m_max = -Double.MAX_VALUE;

  /**
   * How often to report throughput/progress. The setter/getter document this
   * as an instance count; it is not referenced by the processing code in this
   * class.
   */
  protected int m_statusFrequency = 2000;

  /** Count of instances seen */
  protected int m_instanceCount;

  /** Output info retrieval and auc stats for each class (if class is nominal) */
  protected boolean m_outputInfoRetrievalStats;

  /** Main eval object */
  protected Evaluation m_eval;

  /**
   * window size for computing performance metrics - 0 means no window, i.e
   * don't "forget" performance on any instances
   */
  protected int m_windowSize;

  /** Evaluation object for window */
  protected Evaluation m_windowEval;

  /** Window instances */
  protected LinkedList<Instance> m_window;

  /** Window predictions */
  protected LinkedList<double[]> m_windowedPreds;

  /** True if reset (i.e. the next instance received is the first) */
  protected boolean m_reset;

  /** Holds the name of the classifier being used */
  protected String m_classifierName;

  /**
   * Initialize the step.
   *
   * @throws WekaException if a problem occurs during initialization
   */
  @Override
  public void stepInit() throws WekaException {
    m_instanceCount = 0;
    m_dataPoint = new double[1];
    m_dataLegend = new ArrayList<String>();
    if (m_windowSize > 0) {
      m_window = new LinkedList<Instance>();
      m_windowedPreds = new LinkedList<double[]>();
      getStepManager().logBasic(
        "Chart output using windowed " + "evaluation over " + m_windowSize
          + " instances");
    }
    m_reset = true;
  }

  /**
   * Get a list of incoming connection types that this step can accept - a
   * single incrementalClassifier connection.
   *
   * @return a list of incoming connections that this step can accept given its
   *         current state
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    if (getStepManager().numIncomingConnections() == 0) {
      return Arrays.asList(StepManager.CON_INCREMENTAL_CLASSIFIER);
    }
    return new ArrayList<String>();
  }

  /**
   * Get a list of outgoing connection types that this step can produce - text
   * and chart connections, once an incremental classifier connection is
   * present.
   *
   * @return a list of outgoing connections that this step can produce
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    List<String> result = new ArrayList<String>();
    if (getStepManager().numIncomingConnectionsOfType(
      StepManager.CON_INCREMENTAL_CLASSIFIER) > 0) {
      result.add(StepManager.CON_TEXT);
      result.add(StepManager.CON_CHART);
    }
    return result;
  }

  /**
   * Process an incoming incremental classifier payload: evaluate the
   * classifier's prediction for the current test instance and emit chart data
   * points. When the end of the stream is signalled, emit a textual summary
   * of the evaluation.
   *
   * @param data the payload to process
   * @throws WekaException if a problem occurs during evaluation
   */
  @Override
  public void processIncoming(Data data) throws WekaException {
    if (isStopRequested()) {
      return;
    }
    if (getStepManager().isStreamFinished(data)) {
      // done
      // notify downstream steps of end of stream
      Data d = new Data(StepManager.CON_CHART);
      getStepManager().throughputFinished(d);

      // save memory if using windowed evaluation
      m_windowEval = null;
      m_window = null;
      m_windowedPreds = null;

      if (getStepManager().numOutgoingConnectionsOfType(StepManager.CON_TEXT) > 0) {
        try {
          String textTitle = m_classifierName;
          String results =
            "=== Performance information ===\n\n" + "Scheme: " + textTitle
              + "\n" + "Relation: " + m_eval.getHeader().relationName()
              + "\n\n" + m_eval.toSummaryString();
          if (m_eval.getHeader().classIndex() >= 0
            && m_eval.getHeader().classAttribute().isNominal()
            && (m_outputInfoRetrievalStats)) {
            results += "\n" + m_eval.toClassDetailsString();
          }
          if (m_eval.getHeader().classIndex() >= 0
            && m_eval.getHeader().classAttribute().isNominal()) {
            results += "\n" + m_eval.toMatrixString();
          }
          textTitle = "Results: " + textTitle;
          Data textData = new Data(StepManager.CON_TEXT);
          textData.setPayloadElement(StepManager.CON_TEXT, results);
          textData.setPayloadElement(StepManager.CON_AUX_DATA_TEXT_TITLE,
            textTitle);
          getStepManager().outputData(textData);
        } catch (Exception ex) {
          throw new WekaException(ex);
        }
      }
      return;
    }

    weka.classifiers.Classifier classifier =
      (weka.classifiers.Classifier) data
        .getPayloadElement(StepManager.CON_INCREMENTAL_CLASSIFIER);
    Instance instance =
      (Instance) data.getPayloadElement(StepManager.CON_AUX_DATA_TEST_INSTANCE);

    try {
      if (m_reset) {
        // first instance of the stream - set up evaluation and chart legend
        m_reset = false;
        m_classifierName = classifier.getClass().getName();
        m_classifierName =
          m_classifierName.substring(m_classifierName.lastIndexOf(".") + 1,
            m_classifierName.length());
        m_eval = new Evaluation(instance.dataset());
        m_eval.useNoPriors();
        if (m_windowSize > 0) {
          m_windowEval = new Evaluation(instance.dataset());
          m_windowEval.useNoPriors();
        }
        if (instance.classAttribute().isNominal()) {
          if (!instance.classIsMissing()) {
            m_dataPoint = new double[3];
            m_dataLegend.add("Accuracy");
            m_dataLegend.add("RMSE (prob)");
            m_dataLegend.add("Kappa");
          } else {
            m_dataPoint = new double[1];
            m_dataLegend.add("Confidence");
          }
        } else {
          m_dataPoint = new double[1];
          if (instance.classIsMissing()) {
            m_dataLegend.add("Prediction");
          } else {
            m_dataLegend.add("RMSE");
          }
        }
      }

      getStepManager().throughputUpdateStart();
      m_instanceCount++;
      double[] dist = classifier.distributionForInstance(instance);
      double pred = 0;
      if (!instance.classIsMissing()) {
        if (m_outputInfoRetrievalStats) {
          // store predictions so AUC etc. can be computed later
          m_eval.evaluateModelOnceAndRecordPrediction(dist, instance);
        } else {
          m_eval.evaluateModelOnce(dist, instance);
        }
        if (m_windowSize > 0) {
          m_windowEval.evaluateModelOnce(dist, instance);
          m_window.addFirst(instance);
          m_windowedPreds.addFirst(dist);

          if (m_instanceCount > m_windowSize) {
            // forget the oldest prediction by re-evaluating it with a
            // negated instance weight (cancels its original contribution)
            Instance oldest = m_window.removeLast();
            double[] oldDist = m_windowedPreds.removeLast();
            oldest.setWeight(-oldest.weight());
            m_windowEval.evaluateModelOnce(oldDist, oldest);
            oldest.setWeight(-oldest.weight());
          }
        }
      } else {
        pred = classifier.classifyInstance(instance);
      }

      if (instance.classIndex() >= 0) {
        // need to check that the class is not missing
        if (instance.classAttribute().isNominal()) {
          if (!instance.classIsMissing()) {
            if (m_windowSize > 0) {
              m_dataPoint[1] = m_windowEval.rootMeanSquaredError();
              m_dataPoint[2] = m_windowEval.kappa();
            } else {
              m_dataPoint[1] = m_eval.rootMeanSquaredError();
              m_dataPoint[2] = m_eval.kappa();
            }
          }
          double primaryMeasure = 0;
          if (!instance.classIsMissing()) {
            primaryMeasure =
              m_windowSize > 0 ? 1.0 - m_windowEval.errorRate() : 1.0 - m_eval
                .errorRate();
          } else {
            // record confidence as the primary measure
            // (another possibility would be entropy of
            // the distribution, or perhaps average
            // confidence)
            primaryMeasure = dist[Utils.maxIndex(dist)];
          }
          m_dataPoint[0] = primaryMeasure;
          m_chartData
            .setPayloadElement(StepManager.CON_AUX_DATA_CHART_MIN, 0.0);
          m_chartData
            .setPayloadElement(StepManager.CON_AUX_DATA_CHART_MAX, 1.0);
          m_chartData.setPayloadElement(StepManager.CON_AUX_DATA_CHART_LEGEND,
            m_dataLegend);
          m_chartData.setPayloadElement(
            StepManager.CON_AUX_DATA_CHART_DATA_POINT, m_dataPoint);
        } else {
          // numeric class
          double update;
          if (!instance.classIsMissing()) {
            update =
              m_windowSize > 0 ? m_windowEval.rootMeanSquaredError() : m_eval
                .rootMeanSquaredError();
          } else {
            update = pred;
          }
          m_dataPoint[0] = update;
          if (update > m_max) {
            m_max = update;
          }
          if (update < m_min) {
            m_min = update;
          }
          m_chartData.setPayloadElement(StepManager.CON_AUX_DATA_CHART_MIN,
            instance.classIsMissing() ? m_min : 0.0);
          m_chartData.setPayloadElement(StepManager.CON_AUX_DATA_CHART_MAX,
            m_max);
          m_chartData.setPayloadElement(StepManager.CON_AUX_DATA_CHART_LEGEND,
            m_dataLegend);
          m_chartData.setPayloadElement(
            StepManager.CON_AUX_DATA_CHART_DATA_POINT, m_dataPoint);
        }
        if (isStopRequested()) {
          return;
        }
        getStepManager().throughputUpdateEnd();
        getStepManager().outputData(m_chartData.getConnectionName(),
          m_chartData);
      }
    } catch (Exception ex) {
      throw new WekaException(ex);
    }
  }

  /**
   * Set how often progress is reported to the status bar.
   *
   * @param s report progress every s instances
   */
  public void setStatusFrequency(int s) {
    m_statusFrequency = s;
  }

  /**
   * Get how often progress is reported to the status bar.
   *
   * @return after how many instances, progress is reported to the status bar
   */
  public int getStatusFrequency() {
    return m_statusFrequency;
  }

  /**
   * Return a tip text string for this property
   *
   * @return a string for the tip text
   */
  public String statusFrequencyTipText() {
    return "How often to report progress to the status bar.";
  }

  /**
   * Set whether to output per-class information retrieval statistics (nominal
   * class only).
   *
   * @param i true if info retrieval stats are to be output
   */
  public void setOutputPerClassInfoRetrievalStats(boolean i) {
    m_outputInfoRetrievalStats = i;
  }

  /**
   * Get whether per-class information retrieval stats are to be output.
   *
   * @return true if info retrieval stats are to be output
   */
  public boolean getOutputPerClassInfoRetrievalStats() {
    return m_outputInfoRetrievalStats;
  }

  /**
   * Return a tip text string for this property
   *
   * @return a string for the tip text
   */
  public String outputPerClassInfoRetrievalStatsTipText() {
    return "Output per-class info retrieval stats. If set to true, predictions get "
      + "stored so that stats such as AUC can be computed. Note: this consumes some memory.";
  }

  /**
   * Set whether to compute evaluation for charting over a fixed sized window of
   * the most recent instances (rather than the whole stream).
   *
   * @param windowSize the size of the window to use for computing the
   *          evaluation metrics used for charting. Setting a value of zero or
   *          less specifies that no windowing is to be used.
   */
  public void setChartingEvalWindowSize(int windowSize) {
    m_windowSize = windowSize;
  }

  /**
   * Get whether to compute evaluation for charting over a fixed sized window of
   * the most recent instances (rather than the whole stream).
   *
   * @return the size of the window to use for computing the evaluation metrics
   *         used for charting. Setting a value of zero or less specifies that
   *         no windowing is to be used.
   */
  public int getChartingEvalWindowSize() {
    return m_windowSize;
  }

  /**
   * Return a tip text string for this property
   *
   * @return a string for the tip text
   */
  public String chartingEvalWindowSizeTipText() {
    return "For charting only, specify a sliding window size over which to compute "
      + "performance stats. <= 0 means eval on whole stream";
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/InstanceStreamToBatchMaker.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* InstanceStreamToBatchMaker.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.WekaException;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* Step that converts an incoming instance stream to a batch dataset
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(name = "InstanceStreamToBatchMaker", category = "Flow",
  toolTipText = "Converts an incoming instance stream into a batch dataset",
  iconPath = KFGUIConsts.BASE_ICON_PATH + "InstanceStreamToBatchMaker.gif")
public class InstanceStreamToBatchMaker extends BaseStep {

  /** For serialization */
  private static final long serialVersionUID = 5461324282251111320L;

  /** True if we've been reset (i.e. no incoming instance seen yet) */
  protected boolean m_isReset;

  /**
   * The structure of the incoming instances. Collected instances are
   * accumulated directly into this object, so it doubles as the batch under
   * construction.
   */
  protected Instances m_structure;

  /** True if the incoming data contains string attributes */
  protected boolean m_hasStringAtts;

  /**
   * Initialize the step.
   *
   * @throws WekaException if a problem occurs during initialization
   */
  @Override
  public void stepInit() throws WekaException {
    m_isReset = true;
  }

  /**
   * Get a list of incoming connection types that this step can accept. Ideally
   * (and if appropriate), this should take into account the state of the step
   * and any existing incoming connections. E.g. a step might be able to accept
   * one (and only one) incoming batch data connection.
   *
   * @return a list of incoming connections that this step can accept given its
   *         current state (a single "instance" connection, or null once one is
   *         already attached)
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    if (getStepManager().numIncomingConnections() == 0) {
      return Arrays.asList(StepManager.CON_INSTANCE);
    }

    return null;
  }

  /**
   * Get a list of outgoing connection types that this step can produce. Ideally
   * (and if appropriate), this should take into account the state of the step
   * and the incoming connections. E.g. depending on what incoming connection is
   * present, a step might be able to produce a trainingSet output, a testSet
   * output or neither, but not both.
   *
   * @return a list of outgoing connections that this step can produce (batch
   *         connections only, and only once an input is connected)
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    if (getStepManager().numIncomingConnections() > 0) {
      return Arrays.asList(StepManager.CON_DATASET,
        StepManager.CON_TRAININGSET, StepManager.CON_TESTSET);
    }

    return null;
  }

  /**
   * Process incoming data. Accumulates each streamed instance; when the
   * end-of-stream marker is seen, emits the accumulated batch on the first
   * outgoing connection.
   *
   * @param data the payload to process
   * @throws WekaException if the first payload received carries no instance
   */
  @Override
  public void processIncoming(Data data) throws WekaException {
    if (m_isReset) {
      m_isReset = false;
      if (data.getPrimaryPayload() == null) {
        throw new WekaException("We didn't receive any instances!");
      }
      getStepManager().logDetailed("Collecting instances...");
      Instance temp = data.getPrimaryPayload();
      // use a string-free copy of the header so that string values from the
      // stream are re-added into our own string table below
      m_structure = new Instances(temp.dataset(), 0).stringFreeStructure();
      m_hasStringAtts = temp.dataset().checkForStringAttributes();
    }

    if (isStopRequested()) {
      getStepManager().interrupted();
      return;
    }

    if (!getStepManager().isStreamFinished(data)) {
      getStepManager().throughputUpdateStart();
      Instance inst = data.getPrimaryPayload();
      if (m_hasStringAtts) {
        // remap each non-missing string value into this batch's own string
        // table (the incoming instance's index refers to the stream's header)
        for (int i = 0; i < m_structure.numAttributes(); i++) {
          if (m_structure.attribute(i).isString() && !inst.isMissing(i)) {
            int index =
              m_structure.attribute(i).addStringValue(inst.stringValue(i));
            inst.setValue(i, index);
          }
        }
      }
      m_structure.add(inst);
      getStepManager().throughputUpdateEnd();
    } else {
      // end-of-stream: output the accumulated batch
      m_structure.compactify();
      getStepManager().logBasic(
        "Emitting a batch of " + m_structure.numInstances() + " instances.");
      // emit on the first outgoing connection type only
      List<String> outCons =
        new ArrayList<String>(getStepManager().getOutgoingConnections()
          .keySet());

      Data out = new Data(outCons.get(0), m_structure);
      out.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, 1);
      out.setPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM, 1);
      if (!isStopRequested()) {
        getStepManager().outputData(out);
        getStepManager().finished();
      } else {
        getStepManager().interrupted();
      }
    }
  }

  /**
   * If possible, get the output structure for the named connection type as a
   * header-only set of instances. Can return null if the specified connection
   * type is not representable as Instances or cannot be determined at present.
   *
   * @param connectionName the name of the connection type to get the output
   *          structure for
   * @return the output structure as a header-only Instances object
   * @throws WekaException if a problem occurs
   */
  @Override
  public Instances outputStructureForConnectionType(String connectionName)
    throws WekaException {
    if (getStepManager().numIncomingConnections() > 0) {
      // we don't alter the structure of the incoming data, so the same
      // structure is returned for every outgoing connection type
      return getStepManager().getIncomingStructureForConnectionType(
        StepManager.CON_INSTANCE);
    }

    return null;
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/Job.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* Job.java
* Copyright (C) 2016 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Environment;
import weka.core.OptionMetadata;
import weka.core.Settings;
import weka.core.WekaException;
import weka.gui.FilePropertyMetadata;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.BaseExecutionEnvironment;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.Flow;
import weka.knowledgeflow.FlowExecutor;
import weka.knowledgeflow.FlowRunner;
import weka.knowledgeflow.JSONFlowLoader;
import weka.knowledgeflow.JobEnvironment;
import weka.knowledgeflow.KFDefaults;
import weka.knowledgeflow.LogManager;
import weka.knowledgeflow.LoggingLevel;
import weka.knowledgeflow.StepManager;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
/**
* Step that executes another flow as a "job". Typically, you would parameterize
* the flow to be executed with variables (in steps that support variables) and
* then use a a data source connected to a SetVariables step to change the
* values of the variables dynamically at runtime. For example, a DataGrid could
* be used to define a set of instances with a string variable containing paths
* to ARFF files to process; SetVariables can be used to transfer these file
* paths from the incoming instances generated by DataGrid to the values of
* variables; then the Job step can execute it's sub-flow for each configuration
* of variables received, thus processing a different ARFF file (if the subflow
* uses an ArffLoader step).
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(name = "Job", category = "Flow",
  toolTipText = "Execute a flow as a 'job' and wait for it to finish",
  iconPath = KFGUIConsts.BASE_ICON_PATH + "Job.gif")
public class Job extends BaseStep {

  private static final long serialVersionUID = -8684065684979500325L;

  /** The flow file to execute ("--NONE--" means not set) */
  protected File m_flowToRun = new File("--NONE--");

  /** Executes the sub-flow; rebuilt on every call to stepInit() */
  protected transient FlowExecutor m_flowExecutor;

  /** Logging level to use */
  protected LoggingLevel m_logLevel = LoggingLevel.BASIC;

  /**
   * Set the flow file to execute
   *
   * @param flowFile the flow file to execute
   */
  @FilePropertyMetadata(fileChooserDialogType = KFGUIConsts.OPEN_DIALOG,
    directoriesOnly = false)
  @OptionMetadata(displayName = "Flow file",
    description = "The flow to execute", displayOrder = 0)
  public void setFlowFile(File flowFile) {
    m_flowToRun = flowFile;
  }

  /**
   * Get the flow file to execute
   *
   * @return the flow file to execute
   */
  public File getFlowFile() {
    return m_flowToRun;
  }

  /**
   * Initialize the step: load the sub-flow (from the file system, or, failing
   * that, from the classpath) and configure a flow executor for it.
   *
   * @throws WekaException if no flow has been specified, the flow cannot be
   *           located, or settings cannot be loaded
   */
  @Override
  public void stepInit() throws WekaException {
    String fileName = m_flowToRun.toString();
    if (fileName.equals("--NONE--")) {
      throw new WekaException("No flow to execute specified!");
    }

    fileName = getStepManager().environmentSubstitute(fileName);

    m_flowExecutor =
      ((BaseExecutionEnvironment) getStepManager().getExecutionEnvironment())
        .getDefaultFlowExecutor();
    Settings settings = new Settings("weka", KFDefaults.APP_ID);
    try {
      settings.loadSettings();
    } catch (IOException ex) {
      throw new WekaException(ex);
    }
    settings.applyDefaults(new KFDefaults());
    m_flowExecutor.setSettings(settings);

    // setting it on the flow executor
    Environment env =
      new Environment(getStepManager().getExecutionEnvironment()
        .getEnvironmentVariables());
    m_flowExecutor.getExecutionEnvironment().setEnvironmentVariables(env);

    // try the file system first, then fall back to the classpath
    Flow flowToRun = null;
    if (new File(fileName).exists()) {
      flowToRun = Flow.loadFlow(new File(fileName), getStepManager().getLog());
    } else {
      String fileNameWithCorrectSeparators =
        fileName.replace(File.separatorChar, '/');
      if (this.getClass().getClassLoader()
        .getResource(fileNameWithCorrectSeparators) != null) {
        flowToRun = Flow.loadFlow(
          this.getClass().getClassLoader()
            .getResourceAsStream(fileNameWithCorrectSeparators),
          new JSONFlowLoader());
      }
    }

    // FIX: previously a missing flow fell through to setFlow(null) followed by
    // a NullPointerException on flowToRun.getFlowName(); fail with a
    // meaningful error instead
    if (flowToRun == null) {
      throw new WekaException("Unable to locate flow to execute: " + fileName);
    }

    m_flowExecutor.setFlow(flowToRun);
    final String flowToRunName = flowToRun.getFlowName();
    m_flowExecutor.setLogger(new FlowRunner.SimpleLogger() {
      @Override
      public void logMessage(String lm) {
        // re-tag the sub-flow's logging-level markers so that its messages
        // appear in our own log at the corresponding level
        if (lm.contains("[Low]")) {
          getStepManager().logLow(
            lm.replace("[Low]", "<sub-flow:" + flowToRunName + ">"));
        } else if (lm.contains("[Basic]")) {
          getStepManager().logBasic(
            lm.replace("[Basic]", "<sub-flow:" + flowToRunName + ">"));
        } else if (lm.contains("[Detailed]")) {
          getStepManager().logDetailed(
            lm.replace("[Detailed]", "<sub-flow:" + flowToRunName + ">"));
        } else if (lm.contains("[Debugging]")) {
          getStepManager().logDebug(
            lm.replace("[Debugging]", "<sub-flow:" + flowToRunName + ">"));
        } else if (lm.contains("[Warning]")) {
          getStepManager().logWarning(
            lm.replace("[Warning]", "<sub-flow:" + flowToRunName + ">"));
        } else {
          getStepManager().logBasic("<sub-flow>" + lm);
        }
      }
    });
  }

  /**
   * Start the step when it has no incoming connections (i.e. it is a start
   * point of the enclosing flow).
   *
   * @throws WekaException if a problem occurs
   */
  @Override
  public void start() throws WekaException {
    if (getStepManager().numIncomingConnections() == 0) {
      getStepManager().logBasic("Launching as a start point");
      runFlow(null, null, null);
    }
  }

  /**
   * Process incoming data: extract any variables, step properties and prior
   * results carried by the payload and launch the sub-flow with them.
   *
   * @param data the payload to process
   * @throws WekaException if a problem occurs
   */
  @Override
  public void processIncoming(Data data) throws WekaException {
    if (!getStepManager().isStreamFinished(data)) {
      Map<String, String> varsToSet =
        data.getPayloadElement(StepManager.CON_AUX_DATA_ENVIRONMENT_VARIABLES);
      Map<String, Map<String, String>> propsToSet =
        data.getPayloadElement(StepManager.CON_AUX_DATA_ENVIRONMENT_PROPERTIES);
      Map<String, LinkedHashSet<Data>> results =
        data.getPayloadElement(StepManager.CON_AUX_DATA_ENVIRONMENT_RESULTS);

      if (varsToSet != null) {
        getStepManager().logBasic(
          "Received variables (" + varsToSet.size() + " key-value pairs)");
      }
      if (propsToSet != null) {
        getStepManager().logBasic(
          "Received properties (" + propsToSet.size() + " target steps)");
      }
      if (results != null) {
        getStepManager()
          .logBasic(
            "Received results containing " + results.size()
              + " connection types");
      }
      getStepManager().logBasic("Launching sub-flow");
      runFlow(varsToSet, propsToSet, results);
    }
  }

  /**
   * Run the sub-flow using the supplied environment variables (if any). Emits
   * a jobSuccess or jobFailure data object (carrying the supplied variables
   * and properties) depending on the outcome.
   *
   * @param varsToSet variables to set before executing the sub-flow. Can be
   *          null.
   * @param propsToSet property values for target steps (only scheme-based steps
   *          can be targets)
   * @param results results (if any) to pass in to the sub-flow
   * @throws WekaException if a problem occurs
   */
  protected void runFlow(Map<String, String> varsToSet,
    Map<String, Map<String, String>> propsToSet,
    Map<String, LinkedHashSet<Data>> results) throws WekaException {
    getStepManager().processing();

    JobEnvironment env =
      new JobEnvironment(getStepManager().getExecutionEnvironment()
        .getEnvironmentVariables());
    m_flowExecutor.getExecutionEnvironment().setEnvironmentVariables(env);
    if (varsToSet != null) {
      for (Map.Entry<String, String> e : varsToSet.entrySet()) {
        env.addVariable(e.getKey(), e.getValue());
      }
    }
    if (propsToSet != null) {
      env.addToStepProperties(propsToSet);
    }
    if (results != null) {
      env.addAllResults(results);
    }

    getStepManager().statusMessage(
      "Executing flow '" + m_flowExecutor.getFlow().getFlowName() + "'");
    try {
      m_flowExecutor.runParallel();
      m_flowExecutor.waitUntilFinished();

      // just give the executor a bit longer in order to
      // complete shutdown of executor services
      try {
        Thread.sleep(200);
      } catch (InterruptedException e) {
        // ignore
      }

      Data success =
        new Data(StepManager.CON_JOB_SUCCESS, getName()
          + " completed successfully.");
      success.setPayloadElement(StepManager.CON_AUX_DATA_IS_INCREMENTAL, true);
      if (varsToSet != null) {
        success.setPayloadElement(
          StepManager.CON_AUX_DATA_ENVIRONMENT_VARIABLES, varsToSet);
      }
      if (propsToSet != null) {
        success.setPayloadElement(
          StepManager.CON_AUX_DATA_ENVIRONMENT_PROPERTIES, propsToSet);
      }
      // forward any results accumulated by the sub-flow
      JobEnvironment flowEnv =
        (JobEnvironment) m_flowExecutor.getExecutionEnvironment()
          .getEnvironmentVariables();
      if (flowEnv.getResultData() != null) {
        success.setPayloadElement(StepManager.CON_AUX_DATA_ENVIRONMENT_RESULTS,
          flowEnv.getResultData());
      }
      getStepManager().outputData(success);
    } catch (WekaException ex) {
      Data failure =
        new Data(StepManager.CON_JOB_FAILURE, LogManager.stackTraceToString(ex));
      failure.setPayloadElement(StepManager.CON_AUX_DATA_IS_INCREMENTAL, true);
      if (varsToSet != null) {
        failure.setPayloadElement(
          StepManager.CON_AUX_DATA_ENVIRONMENT_VARIABLES, varsToSet);
      }
      if (propsToSet != null) {
        failure.setPayloadElement(
          StepManager.CON_AUX_DATA_ENVIRONMENT_PROPERTIES, propsToSet);
      }
      getStepManager().outputData(failure);
    }
    getStepManager().finished();
  }

  /**
   * Get a list of incoming connection types that this step can accept
   *
   * @return a list of acceptable incoming connection types (empty once an
   *         incoming connection exists)
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    if (getStepManager().numIncomingConnections() == 0) {
      return Arrays.asList(StepManager.CON_ENVIRONMENT,
        StepManager.CON_JOB_SUCCESS, StepManager.CON_JOB_FAILURE);
    }
    return new ArrayList<>();
  }

  /**
   * Get a list of outgoing connection types that this step can produce
   *
   * @return a list of outgoing connection types
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    return Arrays.asList(StepManager.CON_JOB_SUCCESS,
      StepManager.CON_JOB_FAILURE);
  }

  /**
   * Get the custom editor for this step
   *
   * @return the fully qualified class name of the custom editor for this step
   */
  @Override
  public String getCustomEditorForStep() {
    return "weka.gui.knowledgeflow.steps.JobStepEditorDialog";
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/Join.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* Join.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Range;
import weka.core.SerializedObject;
import weka.core.WekaException;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Step that performs an inner join on one or more key fields from two incoming
* batch or streaming datasets.
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(
name = "Join",
category = "Flow",
toolTipText = "Performs an inner join on two incoming datasets/instance streams (IMPORTANT: assumes that "
+ "both datasets are sorted in ascending order of the key fields). If data is not sorted then use"
+ "a Sorter step to sort both into ascending order of the key fields. Does not handle the case where"
+ "keys are not unique in one or both inputs.",
iconPath = KFGUIConsts.BASE_ICON_PATH + "Join.gif")
public class Join extends BaseStep {
/** Separator used to separate first and second input key specifications */
public static final String KEY_SPEC_SEPARATOR = "@@KS@@";
private static final long serialVersionUID = -8248954818247532014L;
/** First source of data */
protected StepManager m_firstInput;
/** Second source of data */
protected StepManager m_secondInput;
/** Whether the first is finished (incremental mode) */
protected transient boolean m_firstFinished;
/** Whether the second is finished (incremental mode) */
protected transient boolean m_secondFinished;
/** Connection type of the first input */
protected String m_firstInputConnectionType = "";
/** Connection type of the second input */
protected String m_secondInputConnectionType = "";
/** Buffer for the first input (capped at 100 for incremental) */
protected transient Queue<Sorter.InstanceHolder> m_firstBuffer;
/** Buffer for the second input (capped at 100 for incremental) */
protected transient Queue<Sorter.InstanceHolder> m_secondBuffer;
/** Reusable data object for streaming output */
protected Data m_streamingData;
/** The structure of the first incoming dataset */
protected transient Instances m_headerOne;
/** The structure of the second incoming dataset */
protected transient Instances m_headerTwo;
/** The structure of the outgoing dataset */
protected transient Instances m_mergedHeader;
/**
* A set of copied outgoing structure instances. Used when there are string
* attributes present in the incremental case in order to prevent concurrency
* problems where string values could get clobbered in the header.
*/
protected transient List<Instances> m_headerPool;
/** Used to cycle over the headers in the header pool */
protected transient AtomicInteger m_count;
/** True if string attributes are present in the incoming data */
protected boolean m_stringAttsPresent;
/** True if the step is running incrementally */
protected boolean m_runningIncrementally;
/** Indexes of the key fields for the first input */
protected int[] m_keyIndexesOne;
/** Indexes of the key fields for the second input */
protected int[] m_keyIndexesTwo;
/** Holds the internal representation of the key specification */
protected String m_keySpec = "";
/** Holds indexes of string attributes, keyed by attribute name */
protected Map<String, Integer> m_stringAttIndexesOne;
/** Holds indexes of string attributes, keyed by attribute name */
protected Map<String, Integer> m_stringAttIndexesTwo;
/**
* True if the first input stream is waiting due to a full buffer (incremental
* mode only)
*/
protected boolean m_firstIsWaiting;
/**
* True if the second input stream is waiting due to a full buffer
* (incremental mode only)
*/
protected boolean m_secondIsWaiting;
  /**
   * Set the key specification (in internal format -
   * k11,k12,...,k1nKEY_SPEC_SEPARATORk21,k22,...,k2n)
   *
   * @param ks the keys specification
   */
  public void setKeySpec(String ks) {
    // stored verbatim; parsed and validated later when the merged header is
    // generated
    m_keySpec = ks;
  }
  /**
   * Get the key specification (in internal format -
   * k11,k12,...,k1nKEY_SPEC_SEPARATORk21,k22,...,k2n)
   *
   * @return the keys specification
   */
  public String getKeySpec() {
    return m_keySpec;
  }
/**
* Get the names of the connected steps as a list
*
* @return the names of the connected steps as a list
*/
public List<String> getConnectedInputNames() {
// see what's connected (if anything)
establishFirstAndSecondConnectedInputs();
List<String> connected = new ArrayList<String>();
connected.add(m_firstInput != null ? m_firstInput.getName() : null);
connected.add(m_secondInput != null ? m_secondInput.getName() : null);
return connected;
}
/**
* Get the Instances structure being produced by the first input
*
* @return the Instances structure from the first input
* @throws WekaException if a problem occurs
*/
public Instances getFirstInputStructure() throws WekaException {
if (m_firstInput == null) {
establishFirstAndSecondConnectedInputs();
}
if (m_firstInput != null) {
return getStepManager().getIncomingStructureFromStep(m_firstInput,
m_firstInputConnectionType);
}
return null;
}
/**
* Get the Instances structure being produced by the second input
*
* @return the Instances structure from the second input
* @throws WekaException if a problem occurs
*/
public Instances getSecondInputStructure() throws WekaException {
if (m_secondInput == null) {
establishFirstAndSecondConnectedInputs();
}
if (m_secondInput != null) {
return getStepManager().getIncomingStructureFromStep(m_secondInput,
m_secondInputConnectionType);
}
return null;
}
/**
* Look for, and configure with respect to, first and second inputs
*/
protected void establishFirstAndSecondConnectedInputs() {
m_firstInput = null;
m_secondInput = null;
for (Map.Entry<String, List<StepManager>> e : getStepManager()
.getIncomingConnections().entrySet()) {
if (m_firstInput != null && m_secondInput != null) {
break;
}
for (StepManager m : e.getValue()) {
if (m_firstInput == null) {
m_firstInput = m;
m_firstInputConnectionType = e.getKey();
} else if (m_secondInput == null) {
m_secondInput = m;
m_secondInputConnectionType = e.getKey();
}
if (m_firstInput != null && m_secondInput != null) {
break;
}
}
}
}
/**
* Initialize the step
*
* @throws WekaException if a problem occurs
*/
@Override
public void stepInit() throws WekaException {
m_firstBuffer = new LinkedList<Sorter.InstanceHolder>();
m_secondBuffer = new LinkedList<Sorter.InstanceHolder>();
m_streamingData = new Data(StepManager.CON_INSTANCE);
m_firstInput = null;
m_secondInput = null;
m_headerOne = null;
m_headerTwo = null;
m_firstFinished = false;
m_secondFinished = false;
if (getStepManager().numIncomingConnections() < 2) {
throw new WekaException("Two incoming connections are required for the "
+ "Join step");
}
establishFirstAndSecondConnectedInputs();
}
/**
* Process some incoming data
*
* @param data the data to process
* @throws WekaException if a problem occurs
*/
@Override
public void processIncoming(Data data) throws WekaException {
if (data.getConnectionName().equals(StepManager.CON_INSTANCE)) {
processStreaming(data);
if (isStopRequested()) {
getStepManager().interrupted();
}
} else {
processBatch(data);
if (isStopRequested()) {
getStepManager().interrupted();
}
return;
}
}
/**
* Handle streaming data
*
* @param data an instance of streaming data
* @throws WekaException if a problem occurs
*/
protected synchronized void processStreaming(Data data) throws WekaException {
if (isStopRequested()) {
return;
}
if (getStepManager().isStreamFinished(data)) {
if (data.getSourceStep().getStepManager() == m_firstInput) {
m_firstFinished = true;
getStepManager().logBasic(
"Finished receiving from " + m_firstInput.getName());
} else if (data.getSourceStep().getStepManager() == m_secondInput) {
m_secondFinished = true;
getStepManager().logBasic(
"Finished receiving from " + m_secondInput.getName());
}
if (m_firstFinished && m_secondFinished) {
clearBuffers();
m_streamingData.clearPayload();
getStepManager().throughputFinished(m_streamingData);
}
return;
}
Instance inst = data.getPrimaryPayload();
StepManager source = data.getSourceStep().getStepManager();
if (m_headerOne == null || m_headerTwo == null) {
if (m_headerOne == null && source == m_firstInput) {
m_headerOne = new Instances(inst.dataset(), 0);
getStepManager().logBasic(
"Initializing buffer for " + m_firstInput.getName());
m_stringAttIndexesOne = new HashMap<String, Integer>();
for (int i = 0; i < m_headerOne.numAttributes(); i++) {
if (m_headerOne.attribute(i).isString()) {
m_stringAttIndexesOne.put(m_headerOne.attribute(i).name(), i);
}
}
}
if (m_headerTwo == null && source == m_secondInput) {
m_headerTwo = new Instances(inst.dataset(), 0);
getStepManager().logBasic(
"Initializing buffer for " + m_secondInput.getName());
m_stringAttIndexesTwo = new HashMap<String, Integer>();
for (int i = 0; i < m_headerTwo.numAttributes(); i++) {
if (m_headerTwo.attribute(i).isString()) {
m_stringAttIndexesTwo.put(m_headerTwo.attribute(i).name(), i);
}
}
}
if (m_mergedHeader == null) {
// can we determine the header?
if (m_headerOne != null && m_headerTwo != null && m_keySpec != null
&& m_keySpec.length() > 0) {
// construct merged header & check validity of indexes
generateMergedHeader();
}
}
}
if (source == m_firstInput) {
addToFirstBuffer(inst);
} else {
addToSecondBuffer(inst);
}
if (source == m_firstInput && m_secondBuffer.size() <= 100
&& m_secondIsWaiting) {
m_secondIsWaiting = false;
notifyAll();
} else if (source == m_secondInput && m_secondBuffer.size() <= 100
&& m_firstIsWaiting) {
m_firstIsWaiting = false;
notifyAll();
}
if (isStopRequested()) {
return;
}
Instance outputI = processBuffers();
if (outputI != null) {
getStepManager().throughputUpdateStart();
m_streamingData.setPayloadElement(StepManager.CON_INSTANCE, outputI);
getStepManager().outputData(m_streamingData);
getStepManager().throughputUpdateEnd();
}
}
/**
* Copy the string values out of an instance into the temporary storage in
* InstanceHolder
*
* @param holder the InstanceHolder encapsulating the instance and it's string
* values
* @param stringAttIndexes indices of string attributes in the instance
*/
private static void copyStringAttVals(Sorter.InstanceHolder holder,
Map<String, Integer> stringAttIndexes) {
for (String attName : stringAttIndexes.keySet()) {
Attribute att = holder.m_instance.dataset().attribute(attName);
String val = holder.m_instance.stringValue(att);
if (holder.m_stringVals == null) {
holder.m_stringVals = new HashMap<String, String>();
}
holder.m_stringVals.put(attName, val);
}
}
  /**
   * Add an instance to the first buffer. If the buffer holds more than 100
   * instances (and the second input has not yet finished) the calling thread
   * blocks until processStreaming() notifies that the buffer has drained.
   *
   * @param inst the instance to add
   */
  protected synchronized void addToFirstBuffer(Instance inst) {
    if (isStopRequested()) {
      return;
    }

    Sorter.InstanceHolder newH = new Sorter.InstanceHolder();
    newH.m_instance = inst;
    copyStringAttVals(newH, m_stringAttIndexesOne);

    m_firstBuffer.add(newH);
    if (m_firstBuffer.size() > 100 && !m_secondFinished) {
      try {
        m_firstIsWaiting = true;
        // NOTE(review): wait() is not re-checked in a loop, so a spurious
        // wakeup lets the producer proceed with a buffer still over the
        // cap - the 100-instance limit is effectively a soft cap
        wait();
      } catch (InterruptedException ex) {
        // ignore
      }
    }
  }
  /**
   * Add an instance to the second buffer. If the buffer holds more than 100
   * instances (and the first input has not yet finished) the calling thread
   * blocks until processStreaming() notifies that the buffer has drained.
   *
   * @param inst the instance to add
   */
  protected synchronized void addToSecondBuffer(Instance inst) {
    if (isStopRequested()) {
      return;
    }

    Sorter.InstanceHolder newH = new Sorter.InstanceHolder();
    newH.m_instance = inst;
    copyStringAttVals(newH, m_stringAttIndexesTwo);

    m_secondBuffer.add(newH);
    if (m_secondBuffer.size() > 100 && !m_firstFinished) {
      try {
        m_secondIsWaiting = true;
        // NOTE(review): like addToFirstBuffer(), wait() is not inside a
        // condition-recheck loop - the buffer cap is a soft limit
        wait();
      } catch (InterruptedException e) {
        //
      }
    }
  }
/**
* Clear the buffers
*
* @throws WekaException if a problem occurs
*/
protected synchronized void clearBuffers() throws WekaException {
while (m_firstBuffer.size() > 0 && m_secondBuffer.size() > 0) {
if (isStopRequested()) {
return;
}
getStepManager().throughputUpdateStart();
Instance newInst = processBuffers();
getStepManager().throughputUpdateEnd();
m_streamingData.setPayloadElement(StepManager.CON_INSTANCE, newInst);
getStepManager().outputData(m_streamingData);
}
}
  /**
   * Process batch data. Each incoming batch is buffered on the side it came
   * from; once both batches are present the join is performed and the result
   * emitted on every outgoing connection type.
   *
   * @param data the data to process
   * @throws WekaException if the data did not originate from one of the two
   *           established inputs
   */
  protected synchronized void processBatch(Data data) throws WekaException {
    Instances insts = data.getPrimaryPayload();
    if (data.getSourceStep().getStepManager() == m_firstInput) {
      m_headerOne = new Instances(insts, 0);
      getStepManager().logDetailed(
        "Receiving batch from " + m_firstInput.getName());
      for (int i = 0; i < insts.numInstances() && !isStopRequested(); i++) {
        Sorter.InstanceHolder tempH = new Sorter.InstanceHolder();
        tempH.m_instance = insts.instance(i);
        m_firstBuffer.add(tempH);
      }
    } else if (data.getSourceStep().getStepManager() == m_secondInput) {
      m_headerTwo = new Instances(insts, 0);
      getStepManager().logDetailed(
        "Receiving batch from " + m_secondInput.getName());
      for (int i = 0; i < insts.numInstances() && !isStopRequested(); i++) {
        Sorter.InstanceHolder tempH = new Sorter.InstanceHolder();
        tempH.m_instance = insts.instance(i);
        m_secondBuffer.add(tempH);
      }
    } else {
      throw new WekaException("This should never happen");
    }

    // both batches present - perform the join
    if (m_firstBuffer.size() > 0 && m_secondBuffer.size() > 0) {
      getStepManager().processing();
      generateMergedHeader();

      Instances newData = new Instances(m_mergedHeader, 0);
      while (!isStopRequested() && m_firstBuffer.size() > 0
        && m_secondBuffer.size() > 0) {
        Instance newI = processBuffers();
        if (newI != null) {
          newData.add(newI);
        }
      }

      // emit the joined dataset on every outgoing connection type
      for (String outConnType : getStepManager().getOutgoingConnections()
        .keySet()) {
        if (isStopRequested()) {
          return;
        }
        Data outputD = new Data(outConnType, newData);
        outputD.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, 1);
        outputD.setPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM, 1);
        getStepManager().outputData(outputD);
      }
      getStepManager().finished();
    }
  }
  /**
   * Check both buffers and return a joined instance (if possible at this time)
   * or null. Implements the inner-join discard policy: whichever side's head
   * is behind on the (ascending-sorted) key values has rows removed until its
   * head catches up or the buffer empties.
   *
   * @return a joined instance or null if the buffer heads do not (yet) match
   */
  protected synchronized Instance processBuffers() {
    if (m_firstBuffer.size() > 0 && m_secondBuffer.size() > 0) {
      Sorter.InstanceHolder firstH = m_firstBuffer.peek();
      Sorter.InstanceHolder secondH = m_secondBuffer.peek();
      Instance first = firstH.m_instance;
      Instance second = secondH.m_instance;

      int cmp = compare(first, second, firstH, secondH);
      if (cmp == 0) {
        // match on all keys - output joined instance
        Instance newInst =
          generateMergedInstance(m_firstBuffer.remove(),
            m_secondBuffer.remove());
        return newInst;
      } else if (cmp < 0) {
        // second is ahead of first - discard rows from first until its head
        // is no longer behind
        do {
          m_firstBuffer.remove();
          if (m_firstBuffer.size() > 0) {
            firstH = m_firstBuffer.peek();
            first = firstH.m_instance;
            cmp = compare(first, second, firstH, secondH);
          }
        } while (cmp < 0 && m_firstBuffer.size() > 0);
      } else {
        // first is ahead of second - discard rows from second until its head
        // is no longer behind
        do {
          m_secondBuffer.remove();
          if (m_secondBuffer.size() > 0) {
            secondH = m_secondBuffer.peek();
            second = secondH.m_instance;
            cmp = compare(first, second, firstH, secondH);
          }
        } while (cmp > 0 && m_secondBuffer.size() > 0);
      }
    }
    return null;
  }
  /**
   * Compares two instances according to the keys. Key attributes are compared
   * pairwise in order; the first non-equal pair decides the result. An
   * instance with a missing key value always compares as "behind" so that it
   * gets discarded by processBuffers().
   *
   * @param one the first instance
   * @param two the second instance
   * @param oneH the first instance holder (in case string attributes are
   *          present and we are running incrementally)
   * @param twoH the second instance holder
   * @return the comparison according to the keys (negative, zero or positive)
   */
  protected int compare(Instance one, Instance two, Sorter.InstanceHolder oneH,
    Sorter.InstanceHolder twoH) {

    for (int i = 0; i < m_keyIndexesOne.length; i++) {
      // both missing: treat this key pair as equal and move on
      if (one.isMissing(m_keyIndexesOne[i])
        && two.isMissing(m_keyIndexesTwo[i])) {
        continue;
      }

      if (one.isMissing(m_keyIndexesOne[i])
        || two.isMissing(m_keyIndexesTwo[i])) {
        // ensure that the input with the missing value gets discarded
        if (one.isMissing(m_keyIndexesOne[i])) {
          return -1;
        } else {
          return 1;
        }
      }

      if (m_mergedHeader.attribute(m_keyIndexesOne[i]).isNumeric()) {
        double v1 = one.value(m_keyIndexesOne[i]);
        double v2 = two.value(m_keyIndexesTwo[i]);
        if (v1 != v2) {
          return v1 < v2 ? -1 : 1;
        }
      } else if (m_mergedHeader.attribute(m_keyIndexesOne[i]).isNominal()) {
        // nominal keys compare by label text, not by internal index
        String oneS = one.stringValue(m_keyIndexesOne[i]);
        String twoS = two.stringValue(m_keyIndexesTwo[i]);

        int cmp = oneS.compareTo(twoS);
        if (cmp != 0) {
          return cmp;
        }
      } else if (m_mergedHeader.attribute(m_keyIndexesOne[i]).isString()) {
        // incremental mode stores string values in the holder (to avoid
        // clobbered header string tables); fall back to the instance itself
        // when no holder values are present
        String attNameOne = m_mergedHeader.attribute(m_keyIndexesOne[i]).name();
        String attNameTwo = m_mergedHeader.attribute(m_keyIndexesTwo[i]).name();
        String oneS =
          oneH.m_stringVals == null || oneH.m_stringVals.size() == 0 ? one
            .stringValue(m_keyIndexesOne[i]) : oneH.m_stringVals
            .get(attNameOne);
        String twoS =
          twoH.m_stringVals == null || twoH.m_stringVals.size() == 0 ? two
            .stringValue(m_keyIndexesTwo[i]) : twoH.m_stringVals
            .get(attNameTwo);

        int cmp = oneS.compareTo(twoS);
        if (cmp != 0) {
          return cmp;
        }
      }
    }

    return 0;
  }
/**
* Generate a merged instance from two input instances that match on the key
* fields
*
* @param one the first input instance
* @param two the second input instance
* @return the merged instance
*/
protected synchronized Instance generateMergedInstance(
Sorter.InstanceHolder one, Sorter.InstanceHolder two) {
double[] vals = new double[m_mergedHeader.numAttributes()];
int count = 0;
Instances currentStructure = m_mergedHeader;
if (m_runningIncrementally && m_stringAttsPresent) {
currentStructure = m_headerPool.get(m_count.getAndIncrement() % 10);
}
for (int i = 0; i < m_headerOne.numAttributes(); i++) {
vals[count] = one.m_instance.value(i);
if (one.m_stringVals != null && one.m_stringVals.size() > 0
&& m_mergedHeader.attribute(count).isString()) {
String valToSetInHeader =
one.m_stringVals.get(one.m_instance.attribute(i).name());
currentStructure.attribute(count).setStringValue(valToSetInHeader);
vals[count] = 0;
}
count++;
}
for (int i = 0; i < m_headerTwo.numAttributes(); i++) {
vals[count] = two.m_instance.value(i);
if (two.m_stringVals != null && two.m_stringVals.size() > 0
&& m_mergedHeader.attribute(count).isString()) {
String valToSetInHeader =
one.m_stringVals.get(two.m_instance.attribute(i).name());
currentStructure.attribute(count).setStringValue(valToSetInHeader);
vals[count] = 0;
}
count++;
}
Instance newInst = new DenseInstance(1.0, vals);
newInst.setDataset(currentStructure);
return newInst;
}
  /**
   * Generate the header of the output instance structure. Parses the key
   * specification (after environment variable substitution) for each input,
   * validates that the key attributes agree in number and type, then builds
   * the merged header: all attributes from input one followed by all
   * attributes from input two (with "_2" appended to names that clash).
   * When string attributes are present in the merged header, a pool of ten
   * header copies is created for incremental operation.
   *
   * @throws WekaException if the key specification is missing or invalid, or
   *           if the key attributes of the two inputs are incompatible
   */
  protected void generateMergedHeader() throws WekaException {
    // check validity of key fields first
    if (m_keySpec == null || m_keySpec.length() == 0) {
      throw new WekaException("Key fields are null!");
    }
    String resolvedKeySpec = m_keySpec;
    resolvedKeySpec = environmentSubstitute(resolvedKeySpec);
    // the spec must contain exactly one key list per input
    String[] parts = resolvedKeySpec.split(KEY_SPEC_SEPARATOR);
    if (parts.length != 2) {
      throw new WekaException("Invalid key specification");
    }
    // try to parse as a Range first; fall back to attribute names on failure
    for (int i = 0; i < 2; i++) {
      String rangeS = parts[i].trim();
      Range r = new Range();
      r.setUpper(i == 0 ? m_headerOne.numAttributes() : m_headerTwo
        .numAttributes());
      try {
        r.setRanges(rangeS);
        if (i == 0) {
          m_keyIndexesOne = r.getSelection();
        } else {
          m_keyIndexesTwo = r.getSelection();
        }
      } catch (IllegalArgumentException e) {
        // assume a comma-separated list of attribute names
        String[] names = rangeS.split(",");
        if (i == 0) {
          m_keyIndexesOne = new int[names.length];
        } else {
          m_keyIndexesTwo = new int[names.length];
        }
        for (int j = 0; j < names.length; j++) {
          String aName = names[j].trim();
          Attribute anAtt =
            (i == 0) ? m_headerOne.attribute(aName) : m_headerTwo
              .attribute(aName);
          if (anAtt == null) {
            throw new WekaException("Invalid key attribute name");
          }
          if (i == 0) {
            m_keyIndexesOne[j] = anAtt.index();
          } else {
            m_keyIndexesTwo[j] = anAtt.index();
          }
        }
      }
    }
    if (m_keyIndexesOne == null || m_keyIndexesTwo == null) {
      throw new WekaException("Key fields are null!");
    }
    // both inputs must supply the same number of key fields
    if (m_keyIndexesOne.length != m_keyIndexesTwo.length) {
      throw new WekaException(
        "Number of key fields are different for each input");
    }
    // check types - corresponding key attributes must have the same type
    for (int i = 0; i < m_keyIndexesOne.length; i++) {
      if (m_headerOne.attribute(m_keyIndexesOne[i]).type() != m_headerTwo
        .attribute(m_keyIndexesTwo[i]).type()) {
        throw new WekaException("Type of key corresponding to key fields "
          + "differ: input 1 - "
          + Attribute.typeToStringShort(m_headerOne
            .attribute(m_keyIndexesOne[i]))
          + " input 2 - "
          + Attribute.typeToStringShort(m_headerTwo
            .attribute(m_keyIndexesTwo[i])));
      }
    }
    // copy attributes from input one, then append attributes from input two,
    // renaming any whose name clashes with an input-one attribute
    ArrayList<Attribute> newAtts = new ArrayList<Attribute>();
    Set<String> nameLookup = new HashSet<String>();
    for (int i = 0; i < m_headerOne.numAttributes(); i++) {
      newAtts.add((Attribute) m_headerOne.attribute(i).copy());
      nameLookup.add(m_headerOne.attribute(i).name());
    }
    for (int i = 0; i < m_headerTwo.numAttributes(); i++) {
      String name = m_headerTwo.attribute(i).name();
      if (nameLookup.contains(name)) {
        name = name + "_2";
      }
      newAtts.add(m_headerTwo.attribute(i).copy(name));
    }
    m_mergedHeader =
      new Instances(m_headerOne.relationName() + "+"
        + m_headerTwo.relationName(), newAtts, 0);
    m_stringAttsPresent = false;
    if (m_mergedHeader.checkForStringAttributes()) {
      m_stringAttsPresent = true;
      // pool of deep copies of the merged header, used round-robin by
      // generateMergedInstance() when running incrementally
      m_headerPool = new ArrayList<Instances>();
      m_count = new AtomicInteger();
      for (int i = 0; i < 10; i++) {
        try {
          m_headerPool.add((Instances) (new SerializedObject(m_mergedHeader))
            .getObject());
        } catch (Exception e) {
          // deep-copy failure for one pool entry is reported but not fatal
          e.printStackTrace();
        }
      }
    }
  }
/**
* Get a list of incoming connection types that this step can accept. Ideally
* (and if appropriate), this should take into account the state of the step
* and any existing incoming connections. E.g. a step might be able to accept
* one (and only one) incoming batch data connection.
*
* @return a list of incoming connections that this step can accept given its
* current state
*/
@Override
public List<String> getIncomingConnectionTypes() {
List<String> result = new ArrayList<String>();
if (getStepManager().numIncomingConnections() == 0) {
return Arrays.asList(StepManager.CON_INSTANCE, StepManager.CON_DATASET,
StepManager.CON_TRAININGSET, StepManager.CON_TESTSET);
}
if (getStepManager().numIncomingConnections() == 1) {
result.addAll(getStepManager().getIncomingConnections().keySet());
return result;
}
return null;
}
/**
* Get a list of outgoing connection types that this step can produce. Ideally
* (and if appropriate), this should take into account the state of the step
* and the incoming connections. E.g. depending on what incoming connection is
* present, a step might be able to produce a trainingSet output, a testSet
* output or neither, but not both.
*
* @return a list of outgoing connections that this step can produce
*/
@Override
public List<String> getOutgoingConnectionTypes() {
if (getStepManager().numIncomingConnections() > 0) {
// we output the same connection type as the inputs
List<String> result = new ArrayList<String>();
result.addAll(getStepManager().getIncomingConnections().keySet());
return result;
}
return null;
}
/**
* Return the fully qualified name of a custom editor component (JComponent)
* to use for editing the properties of the step. This method can return null,
* in which case the system will dynamically generate an editor using the
* GenericObjectEditor
*
* @return the fully qualified name of a step editor component
*/
@Override
public String getCustomEditorForStep() {
return "weka.gui.knowledgeflow.steps.JoinStepEditorDialog";
}
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/KFStep.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* KFStep.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Class annotation for Knowledge Flow steps. Supplies the metadata (display
 * name, category folder, tool tip and icon) used to present a step in the
 * GUI, plus an optional flag marking the step as resource intensive.
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface KFStep {
  /**
   * The name of this step
   *
   * @return the name of the step
   */
  String name();
  /**
   * The top-level folder in the JTree that this step should appear in
   *
   * @return the name of the top-level folder that this step should appear in
   */
  String category();
  /**
   * Mouse-over tool tip for this step (appears when the mouse hovers over the
   * entry in the JTree)
   *
   * @return the tool tip text for this step
   */
  String toolTipText();
  /**
   * Path (as a resource on the classpath) to the icon for this step
   *
   * @return the path to the icon for this step
   */
  String iconPath();
  /**
   * True if this processing step is resource intensive (cpu or memory). In
   * that case the base execution environment will run the step on its
   * executor service that has a limited number of worker threads.
   *
   * @return true if this step is resource (cpu or memory) intensive
   */
  boolean resourceIntensive() default false;
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/Loader.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* Join.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Environment;
import weka.core.EnvironmentHandler;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.SerializedObject;
import weka.core.WekaException;
import weka.core.converters.FileSourcedConverter;
import weka.gui.ProgrammaticProperty;
import weka.gui.beans.StreamThroughput;
import weka.gui.knowledgeflow.StepVisual;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import weka.knowledgeflow.StepManagerImpl;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
/**
 * Knowledge Flow step that wraps {@code weka.core.converters.Loader}s.
 * Outputs either a single batch dataSet or a stream of individual instances,
 * depending on which outgoing connection type is present.
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision: $
 */
@KFStep(name = "Loader", category = "DataSources",
  toolTipText = "Weka loader wrapper", iconPath = "")
public class Loader extends WekaAlgorithmWrapper implements Serializable {
  private static final long serialVersionUID = -788869066035779154L;
  /**
   * Global info for the wrapped loader (if it exists).
   */
  protected String m_globalInfo;
  /** True if we're going to be streaming instance objects */
  protected boolean m_instanceGeneration;
  /** True if there are no outgoing connections (start() is then a no-op) */
  protected boolean m_noOutputs;
  /** Reusable data container for streaming instance output */
  protected Data m_instanceData;
  /** For measuring the overall flow throughput */
  protected StreamThroughput m_flowThroughput;
  /**
   * Get the class of the wrapped algorithm
   *
   * @return the class of the wrapped algorithm
   */
  @Override
  public Class getWrappedAlgorithmClass() {
    return weka.core.converters.Loader.class;
  }
  /**
   * Set the wrapped algorithm to use
   *
   * @param algo the algorithm to use
   */
  @Override
  public void setWrappedAlgorithm(Object algo) {
    super.setWrappedAlgorithm(algo);
    m_defaultIconPath = StepVisual.BASE_ICON_PATH + "DefaultDataSource.gif";
  }
  /**
   * Convenience method - calls {@code getWrappedAlgorithm()}
   *
   * @return the wrapped loader
   */
  public weka.core.converters.Loader getLoader() {
    return (weka.core.converters.Loader) getWrappedAlgorithm();
  }
  /**
   * Convenience method - calls {@code setWrappedAlgorithm()}
   *
   * @param loader the loader to use
   */
  @ProgrammaticProperty
  public void setLoader(weka.core.converters.Loader loader) {
    setWrappedAlgorithm(loader);
  }
  /**
   * Initialize the step. Determines whether output will be a single batch
   * dataSet or a stream of individual instances, based on the outgoing
   * connections present (the two kinds are mutually exclusive).
   *
   * @throws WekaException if the wrapped algorithm is not a Loader, or if
   *           both dataSet and instance outgoing connections are present
   */
  @Override
  public void stepInit() throws WekaException {
    if (!(getWrappedAlgorithm() instanceof weka.core.converters.Loader)) {
      throw new WekaException("Incorrect type of algorithm");
    }
    int numDatasetOutputs =
      getStepManager().numOutgoingConnectionsOfType(StepManager.CON_DATASET);
    int numInstanceOutputs =
      getStepManager().numOutgoingConnectionsOfType(StepManager.CON_INSTANCE);
    m_noOutputs = numInstanceOutputs == 0 && numDatasetOutputs == 0;
    if (numDatasetOutputs > 0 && numInstanceOutputs > 0) {
      throw new WekaException(
        "Can't have both instance and dataSet outgoing connections!");
    }
    if (getWrappedAlgorithm() instanceof EnvironmentHandler) {
      // pass the flow's environment variables through to the loader
      ((EnvironmentHandler) getWrappedAlgorithm())
        .setEnvironment(getStepManager().getExecutionEnvironment()
          .getEnvironmentVariables());
    }
    m_instanceGeneration = numInstanceOutputs > 0;
    m_instanceData = new Data(StepManager.CON_INSTANCE);
  }
  /**
   * Start executing. In batch mode the entire dataset is loaded and output
   * over a single dataSet connection; in incremental mode instances are read
   * ahead and output one at a time over an instance connection.
   *
   * @throws WekaException if a problem occurs
   */
  @Override
  public void start() throws WekaException {
    if (m_noOutputs) {
      // nothing connected downstream - nothing to do
      return;
    }
    getStepManager().processing();
    weka.core.converters.Loader theLoader =
      (weka.core.converters.Loader) getWrappedAlgorithm();
    String startMessage =
      (theLoader instanceof FileSourcedConverter) ? "Loading "
        + environmentSubstitute(((FileSourcedConverter) theLoader).retrieveFile().toString())
        : "Loading...";
    getStepManager().logBasic(startMessage);
    getStepManager().statusMessage(startMessage);
    if (!m_instanceGeneration) {
      // batch mode - load everything and output a single dataSet
      try {
        theLoader.reset();
        theLoader.setRetrieval(weka.core.converters.Loader.BATCH);
        Instances dataset = theLoader.getDataSet();
        getStepManager().logBasic("Loaded " + dataset.relationName());
        Data data = new Data();
        data.setPayloadElement(StepManager.CON_DATASET, dataset);
        getStepManager().outputData(StepManager.CON_DATASET, data);
      } catch (Exception ex) {
        throw new WekaException(ex);
      } finally {
        getStepManager().finished();
      }
    } else {
      // incremental mode - stream instances one at a time.
      // stm is the key used to register (and later clear, via stm + "remove")
      // the overall flow throughput entry in the log's status area
      String stm =
        getName() + "$" + hashCode() + 99 + "| overall flow throughput -|";
      m_flowThroughput =
        new StreamThroughput(stm, "Starting flow...",
          ((StepManagerImpl) getStepManager()).getLog());
      Instance nextInstance = null;
      Instances structure = null;
      Instances structureCopy = null;
      Instances currentStructure = null;
      boolean stringAttsPresent = false;
      try {
        theLoader.reset();
        theLoader.setRetrieval(weka.core.converters.Loader.INCREMENTAL);
        structure = theLoader.getStructure();
        if (structure.checkForStringAttributes()) {
          // NOTE(review): with string attributes present, two copies of the
          // header are alternated in the loop below - presumably so the
          // string value referenced by the previously emitted instance is
          // not clobbered when the next one is read; confirm against
          // weka.core.converters.Loader's incremental contract
          structureCopy =
            (Instances) (new SerializedObject(structure).getObject());
          stringAttsPresent = true;
        }
        currentStructure = structure;
      } catch (Exception ex) {
        throw new WekaException(ex);
      }
      if (isStopRequested()) {
        return;
      }
      try {
        nextInstance = theLoader.getNextInstance(structure);
      } catch (Exception ex) {
        // getStepManager().throughputFinished(m_instanceData);
        throw new WekaException(ex);
      }
      while (!isStopRequested() && nextInstance != null) {
        m_flowThroughput.updateStart();
        getStepManager().throughputUpdateStart();
        if (stringAttsPresent) {
          // alternate between the two header copies (see NOTE above)
          if (currentStructure == structure) {
            currentStructure = structureCopy;
          } else {
            currentStructure = structure;
          }
        }
        m_instanceData
          .setPayloadElement(StepManager.CON_INSTANCE, nextInstance);
        try {
          // read one instance ahead so end-of-input can be detected and
          // signalled after the last real instance has been output
          nextInstance = theLoader.getNextInstance(currentStructure);
        } catch (Exception ex) {
          getStepManager().throughputFinished(m_instanceData);
          throw new WekaException(ex);
        }
        getStepManager().throughputUpdateEnd(); // finished read operation
        getStepManager().outputData(StepManager.CON_INSTANCE, m_instanceData);
        m_flowThroughput.updateEnd(((StepManagerImpl) getStepManager())
          .getLog());
      }
      if (isStopRequested()) {
        // clear our throughput entry from the status area
        ((StepManagerImpl) getStepManager()).getLog().statusMessage(
          stm + "remove");
        return;
      }
      m_flowThroughput.finished(((StepManagerImpl) getStepManager()).getLog());
      // signal end of input (empty payload marks the end of the stream)
      m_instanceData.clearPayload();
      getStepManager().throughputFinished(m_instanceData);
      // int flowSpeed = m_flowThroughput.getAverageInstancesPerSecond();
      // String finalMessage += ("" + flowSpeed +
      // " insts/sec (flow throughput)");
    }
  }
  /**
   * If possible, get the output structure for the named connection type as a
   * header-only set of instances. Can return null if the specified connection
   * type is not representable as Instances or cannot be determined at present.
   *
   * @param connectionName the name of the connection type to get the output
   *          structure for
   * @return the output structure as a header-only Instances object
   * @throws WekaException if a problem occurs
   */
  @Override
  public Instances outputStructureForConnectionType(String connectionName)
    throws WekaException {
    // can't reset the loader to get the structure if we're actively
    // processing...
    if (getStepManager().isStepBusy()) {
      return null;
    }
    try {
      weka.core.converters.Loader theLoader =
        (weka.core.converters.Loader) getWrappedAlgorithm();
      theLoader.reset();
      if (theLoader instanceof EnvironmentHandler) {
        ((EnvironmentHandler) theLoader).setEnvironment(Environment
          .getSystemWide());
      }
      return theLoader.getStructure();
    } catch (Exception ex) {
      // structure is "best effort" here - log rather than propagate
      getStepManager().logError(ex.getMessage(), ex);
    }
    return null;
  }
  /**
   * Get a list of incoming connection types that this step can accept - none,
   * as a loader is a start point of a flow.
   *
   * @return null (no incoming connections accepted)
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    // doesn't accept incoming connections
    return null;
  }
  /**
   * Get a list of outgoing connection types that this step can produce.
   * dataSet and instance outputs are mutually exclusive: until one kind is
   * connected both are offered; afterwards only the connected kind is.
   *
   * @return a list of outgoing connections that this step can produce
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    List<String> outgoing = new ArrayList<String>();
    int numDatasetOutputs =
      getStepManager().numOutgoingConnectionsOfType(StepManager.CON_DATASET);
    int numInstanceOutputs =
      getStepManager().numOutgoingConnectionsOfType(StepManager.CON_INSTANCE);
    if (numDatasetOutputs == 0 && numInstanceOutputs == 0) {
      outgoing.add(StepManager.CON_DATASET);
      outgoing.add(StepManager.CON_INSTANCE);
    } else if (numDatasetOutputs > 0) {
      outgoing.add(StepManager.CON_DATASET);
    } else if (numInstanceOutputs > 0) {
      outgoing.add(StepManager.CON_INSTANCE);
    }
    return outgoing;
  }
  /**
   * Return the fully qualified name of a custom editor component (JComponent)
   * to use for editing the properties of the step. This method can return null,
   * in which case the system will dynamically generate an editor using the
   * GenericObjectEditor
   *
   * @return the fully qualified name of a step editor component
   */
  @Override
  public String getCustomEditorForStep() {
    return "weka.gui.knowledgeflow.steps.LoaderStepEditorDialog";
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/MakeResourceIntensive.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* MakeResourceIntensive.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.OptionMetadata;
import weka.core.WekaException;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import weka.knowledgeflow.StepManagerImpl;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
/**
 * A Step that overrides the resource-intensive setting of the steps directly
 * connected downstream of it. Whatever those steps declare (or don't) via
 * the {@code resourceIntensive} element of their {@code KFStep} annotation
 * is replaced by the value configured here. The Knowledge Flow execution
 * environment maintains two executor services: a primary one for ordinary
 * batch step processing, and a secondary one - with a limited (typically
 * {@code <= num cpu cores}) number of threads - for {@code StepTask}s (which
 * are assumed resource intensive by default) and for steps that declare
 * themselves resource intensive. Flagging cpu/memory-heavy steps this way
 * keeps lighter steps (and the UI) responsive; e.g. the Classifier step is
 * resource intensive so that processing cross-validation folds in parallel
 * on a large data set or with a computationally intensive classifier does
 * not blow out memory or bog the system down.
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision: $
 */
@KFStep(
  name = "MakeResourceIntensive",
  category = "Flow",
  toolTipText = "Makes downstream connected steps resource intensive (or not)."
    + " This shifts "
    + "processing of such steps between the main step executor<br>"
    + "service and the high resource executor service or vice versa.",
  iconPath = KFGUIConsts.BASE_ICON_PATH + "DiamondPlain.gif")
public class MakeResourceIntensive extends BaseStep {
  private static final long serialVersionUID = -5670771681991035130L;
  /** Whether directly connected downstream steps get flagged as intensive */
  protected boolean m_setAsResourceIntensive = true;
  /**
   * Set whether downstream connected steps should be flagged as resource
   * intensive.
   *
   * @param resourceIntensive true to flag the downstream connected steps as
   *          resource intensive; false to clear the flag
   */
  @OptionMetadata(
    displayName = "Make downstream step(s) high resource",
    description = "<html>Makes downstream connected "
      + "steps resource intensive (or not)<br>This shifts processing of such steps "
      + "between the main step executor service and the high resource executor "
      + "service or vice versa.</html>")
  public void setMakeResourceIntensive(boolean resourceIntensive) {
    m_setAsResourceIntensive = resourceIntensive;
  }
  /**
   * Get whether downstream connected steps will be flagged as resource
   * intensive.
   *
   * @return true if downstream connected steps are to be flagged as resource
   *         intensive
   */
  public boolean getMakeResourceIntensive() {
    return m_setAsResourceIntensive;
  }
  /**
   * Initialize the step - nothing to do for this step.
   *
   * @throws WekaException if a problem occurs
   */
  @Override
  public void stepInit() throws WekaException {
  }
  /**
   * Get the incoming connection types this step accepts: any of the batch
   * data set or batch model connection types.
   *
   * @return a list of incoming connections that this step can accept
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    return Arrays.asList(StepManager.CON_DATASET, StepManager.CON_TRAININGSET,
      StepManager.CON_TESTSET, StepManager.CON_BATCH_CLASSIFIER,
      StepManager.CON_BATCH_CLUSTERER, StepManager.CON_BATCH_ASSOCIATOR);
  }
  /**
   * Get the outgoing connection types this step can produce - it simply
   * mirrors whatever connection types are coming in.
   *
   * @return a list of outgoing connections that this step can produce
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    return new ArrayList<String>(getStepManager().getIncomingConnections()
      .keySet());
  }
  /**
   * Process incoming data: flag every step connected downstream over the
   * data's connection type, then pass the data through unchanged.
   *
   * @param data the data to process
   * @throws WekaException if a problem occurs
   */
  @Override
  public void processIncoming(Data data) throws WekaException {
    String connType = data.getConnectionName();
    for (StepManager downstream : getStepManager()
      .getOutgoingConnectedStepsOfConnectionType(connType)) {
      getStepManager().logDetailed(
        "Setting " + downstream.getName() + " as resource intensive: "
          + m_setAsResourceIntensive);
      ((StepManagerImpl) downstream)
        .setStepIsResourceIntensive(m_setAsResourceIntensive);
    }
    getStepManager().outputData(data);
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/MemoryBasedDataSource.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* MemoryBasedDataSource.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Instances;
import weka.core.WekaException;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import java.util.Arrays;
import java.util.List;
/**
 * Simple start step that holds a set of instances in memory and emits it
 * over a dataSet connection. Used programmatically - invoking setInstances()
 * on the MainKFPerspective creates a new Flow containing this step.
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision: $
 */
@KFStep(name = "MemoryDataSource", category = "DataSources",
  toolTipText = "Memory-based data", iconPath = KFGUIConsts.BASE_ICON_PATH
    + "DefaultDataSource.gif")
public class MemoryBasedDataSource extends BaseStep {
  private static final long serialVersionUID = -1901014330145130275L;
  /** The dataset that this step emits when the flow starts */
  protected Instances m_instances;
  /**
   * Set the dataset that this step will emit.
   *
   * @param instances the instances to output
   */
  public void setInstances(Instances instances) {
    m_instances = instances;
  }
  /**
   * Get the dataset that this step will emit.
   *
   * @return the instances to output
   */
  public Instances getInstances() {
    return m_instances;
  }
  /**
   * Initialize the step.
   *
   * @throws WekaException if no dataset has been supplied yet
   */
  @Override
  public void stepInit() throws WekaException {
    if (m_instances == null) {
      throw new WekaException(
        "Has not been initialized with a set of instances");
    }
  }
  /**
   * Get the incoming connection types this step accepts - none, since this
   * is a start point of a flow.
   *
   * @return null (no incoming connections accepted)
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    return null;
  }
  /**
   * Get the outgoing connection types this step can produce - a single
   * dataSet connection.
   *
   * @return a list containing the dataSet connection type
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    return Arrays.asList(StepManager.CON_DATASET);
  }
  /**
   * Start processing - emits the stored dataset over the dataSet connection
   * and then signals completion.
   *
   * @throws WekaException if a problem occurs
   */
  @Override
  public void start() throws WekaException {
    getStepManager().processing();
    getStepManager().outputData(new Data(StepManager.CON_DATASET, m_instances));
    getStepManager().finished();
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/ModelPerformanceChart.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* ModelPerformanceChart.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.OptionMetadata;
import weka.core.WekaException;
import weka.gui.ProgrammaticProperty;
import weka.gui.beans.OffscreenChartRenderer;
import weka.core.PluginManager;
import weka.gui.beans.WekaOffscreenChartRenderer;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.gui.visualize.PlotData2D;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
* A Step that collects and displays either classifier error plots or threshold
* curves
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(name = "ModelPerformanceChart", category = "Visualization",
  toolTipText = "Visualize performance charts (such as ROC).",
  iconPath = KFGUIConsts.BASE_ICON_PATH + "ModelPerformanceChart.gif")
public class ModelPerformanceChart extends BaseStep implements DataCollector {

  private static final long serialVersionUID = 6166590810777938147L;

  /** Default plot width (pixels) used when the configured width is invalid */
  protected static final int DEFAULT_PLOT_WIDTH = 500;

  /** Default plot height (pixels) used when the configured height is invalid */
  protected static final int DEFAULT_PLOT_HEIGHT = 400;

  /** Current set of plots. First element is the master plot */
  protected List<PlotData2D> m_plots = new ArrayList<PlotData2D>();

  /** For rendering plots to encapsulate in Image connections */
  protected transient List<Instances> m_offscreenPlotData;

  /** Series titles for the collected offscreen threshold plots */
  protected transient List<String> m_thresholdSeriesTitles;

  /** Renderer used to generate BufferedImages for image connections */
  protected transient OffscreenChartRenderer m_offscreenRenderer;

  /** Name of the renderer to use for offscreen chart rendering */
  protected String m_offscreenRendererName = "Weka Chart Renderer";

  /**
   * The name of the attribute to use for the x-axis of offscreen plots. If left
   * empty, False Positive Rate is used for threshold curves
   */
  protected String m_xAxis = "";

  /**
   * The name of the attribute to use for the y-axis of offscreen plots. If left
   * empty, True Positive Rate is used for threshold curves
   */
  protected String m_yAxis = "";

  /**
   * Additional options for the offscreen renderer
   */
  protected String m_additionalOptions = "";

  /** Width of offscreen plots */
  protected String m_width = "500";

  /** Height of offscreen plots */
  protected String m_height = "400";

  /** True if the collected plots contain threshold data */
  protected boolean m_dataIsThresholdData;

  /**
   * Set the name of the attribute for the x-axis in offscreen plots. This
   * defaults to "False Positive Rate" for threshold curves if not specified.
   *
   * @param xAxis the name of the xAxis
   */
  @OptionMetadata(displayName = "X-axis attribute",
    description = "Attribute name " + "or /first, /last or /<index>",
    displayOrder = 1)
  public void setOffscreenXAxis(String xAxis) {
    m_xAxis = xAxis;
  }

  /**
   * Get the name of the attribute for the x-axis in offscreen plots
   *
   * @return the name of the xAxis
   */
  public String getOffscreenXAxis() {
    return m_xAxis;
  }

  /**
   * Set the name of the attribute for the y-axis in offscreen plots. This
   * defaults to "True Positive Rate" for threshold curves if not specified.
   *
   * @param yAxis the name of the yAxis
   */
  @OptionMetadata(displayName = "Y-axis attribute",
    description = "Attribute name " + "or /first, /last or /<index>",
    displayOrder = 2)
  public void setOffscreenYAxis(String yAxis) {
    m_yAxis = yAxis;
  }

  /**
   * Get the name of the attribute for the y-axis of offscreen plots.
   *
   * @return the name of the yAxis.
   */
  public String getOffscreenYAxis() {
    return m_yAxis;
  }

  /**
   * Set the width (in pixels) of the offscreen image to generate.
   *
   * @param width the width in pixels.
   */
  @OptionMetadata(displayName = "Chart width (pixels)",
    description = "Width of the rendered chart", displayOrder = 3)
  public void setOffscreenWidth(String width) {
    m_width = width;
  }

  /**
   * Get the width (in pixels) of the offscreen image to generate.
   *
   * @return the width in pixels.
   */
  public String getOffscreenWidth() {
    return m_width;
  }

  /**
   * Set the height (in pixels) of the offscreen image to generate
   *
   * @param height the height in pixels
   */
  @OptionMetadata(displayName = "Chart height (pixels)",
    description = "Height of the rendered chart", displayOrder = 4)
  public void setOffscreenHeight(String height) {
    m_height = height;
  }

  /**
   * Get the height (in pixels) of the offscreen image to generate
   *
   * @return the height in pixels
   */
  public String getOffscreenHeight() {
    return m_height;
  }

  /**
   * Set the name of the renderer to use for offscreen chart rendering
   * operations. Clears any previously configured renderer instance so that the
   * new one is looked up lazily.
   *
   * @param rendererName the name of the renderer to use
   */
  @ProgrammaticProperty
  public void setOffscreenRendererName(String rendererName) {
    m_offscreenRendererName = rendererName;
    m_offscreenRenderer = null;
  }

  /**
   * Get the name of the renderer to use for offscreen chart rendering
   * operations
   *
   * @return the name of the renderer to use
   */
  public String getOffscreenRendererName() {
    return m_offscreenRendererName;
  }

  /**
   * Set the additional options for the offscreen renderer
   *
   * @param additional additional options
   */
  @ProgrammaticProperty
  public void setOffscreenAdditionalOpts(String additional) {
    m_additionalOptions = additional;
  }

  /**
   * Get the additional options for the offscreen renderer
   *
   * @return the additional options
   */
  public String getOffscreenAdditionalOpts() {
    return m_additionalOptions;
  }

  /**
   * Configures the offscreen renderer to use. Does nothing if a renderer has
   * already been configured. Falls back (with a warning) to the built-in
   * {@code WekaOffscreenChartRenderer} if the named plugin renderer cannot be
   * instantiated.
   */
  protected void setupOffscreenRenderer() {
    getStepManager().logDetailed(
      "Initializing offscreen renderer: " + getOffscreenRendererName());

    if (m_offscreenRenderer != null) {
      return;
    }

    // empty name, or the name of the built-in renderer -> use the default
    if (m_offscreenRendererName == null
      || m_offscreenRendererName.length() == 0
      || m_offscreenRendererName.equalsIgnoreCase("weka chart renderer")) {
      m_offscreenRenderer = new WekaOffscreenChartRenderer();
      return;
    }

    try {
      Object r =
        PluginManager.getPluginInstance(
          "weka.gui.beans.OffscreenChartRenderer", m_offscreenRendererName);
      if (r instanceof OffscreenChartRenderer) {
        m_offscreenRenderer = (OffscreenChartRenderer) r;
        return;
      }
    } catch (Exception ex) {
      // fall through to the default-renderer fallback below
    }

    // the plugin lookup failed or returned the wrong type - use the built-in
    // default instead
    getStepManager().logWarning(
      "Offscreen renderer '" + getOffscreenRendererName()
        + "' is not available, using default weka chart renderer "
        + "instead");
    m_offscreenRenderer = new WekaOffscreenChartRenderer();
  }

  /**
   * Get a list of incoming connection types that this step can accept. Before
   * any connection is made, both error and threshold data are accepted; once
   * connected, only additional threshold data connections (assumed to be
   * comparable) can be added.
   *
   * @return a list of incoming connections that this step can accept given its
   *         current state
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    List<String> result = new ArrayList<String>();
    if (getStepManager().numIncomingConnections() == 0) {
      result.add(StepManager.CON_THRESHOLD_DATA);
      result.add(StepManager.CON_VISUALIZABLE_ERROR);
    } else {
      // we can accept multiple inputs of threshold data, as long
      // as they are comparable (we assume this)
      if (getStepManager().numIncomingConnectionsOfType(
        StepManager.CON_THRESHOLD_DATA) > 0) {
        result.add(StepManager.CON_THRESHOLD_DATA);
      }
    }
    return result;
  }

  /**
   * Get a list of outgoing connection types that this step can produce. An
   * image connection can only be produced once at least one incoming
   * connection is present.
   *
   * @return a list of outgoing connections that this step can produce
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    List<String> result = new ArrayList<String>();
    if (getStepManager().numIncomingConnections() > 0) {
      result.add(StepManager.CON_IMAGE);
    }
    return result;
  }

  /**
   * Add a threshold plot to the offscreen data collection and render all
   * collected series to an image.
   *
   * @param thresholdD the plot data to add
   * @return the image that was rendered
   * @throws WekaException if a problem occurs
   */
  protected BufferedImage addOffscreenThresholdPlot(PlotData2D thresholdD)
    throws WekaException {
    m_offscreenPlotData.add(thresholdD.getPlotInstances());
    m_thresholdSeriesTitles.add(thresholdD.getPlotName());

    // renderer options - default to coloring by the last attribute
    List<String> options = new ArrayList<String>();
    String additional = "-color=/last";
    if (m_additionalOptions != null && m_additionalOptions.length() > 0) {
      additional = getStepManager().environmentSubstitute(m_additionalOptions);
    }
    for (String p : additional.split(",")) {
      options.add(p.trim());
    }

    String xAxis = "False Positive Rate";
    if (m_xAxis != null && m_xAxis.length() > 0) {
      xAxis = getStepManager().environmentSubstitute(m_xAxis);
    }

    String yAxis = "True Positive Rate";
    if (m_yAxis != null && m_yAxis.length() > 0) {
      yAxis = getStepManager().environmentSubstitute(m_yAxis);
    }

    int defWidth =
      parsePlotDimension(getStepManager().environmentSubstitute(m_width),
        DEFAULT_PLOT_WIDTH);
    int defHeight =
      parsePlotDimension(getStepManager().environmentSubstitute(m_height),
        DEFAULT_PLOT_HEIGHT);

    // copy each collected series and label it with its series title so the
    // renderer can distinguish the curves
    List<Instances> series = new ArrayList<Instances>();
    for (int i = 0; i < m_offscreenPlotData.size(); i++) {
      Instances temp = new Instances(m_offscreenPlotData.get(i));
      temp.setRelationName(m_thresholdSeriesTitles.get(i));
      series.add(temp);
    }
    try {
      return m_offscreenRenderer.renderXYLineChart(defWidth, defHeight, series,
        xAxis, yAxis, options);
    } catch (Exception ex) {
      throw new WekaException(ex);
    }
  }

  /**
   * Parse a plot dimension (width or height). Logs a warning and falls back to
   * the supplied default rather than failing on a malformed value.
   *
   * @param value the (environment-substituted) dimension string
   * @param defaultValue the value to use if parsing fails
   * @return the parsed dimension, or the default
   */
  protected int parsePlotDimension(String value, int defaultValue) {
    try {
      return Integer.parseInt(value.trim());
    } catch (NumberFormatException ex) {
      getStepManager().logWarning("Unable to parse plot dimension '" + value
        + "' - using default (" + defaultValue + ")");
      return defaultValue;
    }
  }

  /**
   * Add an error plot to the offscreen plot collection and render the
   * collection to an image.
   *
   * @param plotData the plot to add
   * @return the image that was rendered
   * @throws WekaException if a problem occurs
   */
  protected BufferedImage addOffscreenErrorPlot(PlotData2D plotData)
    throws WekaException {
    Instances predictedI = plotData.getPlotInstances();

    if (predictedI.classAttribute().isNominal()) {
      // split the classes out into individual series.
      // add a new attribute to hold point sizes - correctly
      // classified instances get default point size (2);
      // misclassified instances get point size (5).
      // WekaOffscreenChartRenderer can take advantage of this
      // information - other plugin renderers may or may not
      // be able to use it
      ArrayList<Attribute> atts = new ArrayList<Attribute>();
      for (int i = 0; i < predictedI.numAttributes(); i++) {
        atts.add((Attribute) predictedI.attribute(i).copy());
      }
      atts.add(new Attribute("@@size@@"));
      Instances newInsts =
        new Instances(predictedI.relationName(), atts,
          predictedI.numInstances());
      newInsts.setClassIndex(predictedI.classIndex());

      for (int i = 0; i < predictedI.numInstances(); i++) {
        double[] vals = new double[newInsts.numAttributes()];
        for (int j = 0; j < predictedI.numAttributes(); j++) {
          vals[j] = predictedI.instance(i).value(j);
        }
        vals[vals.length - 1] = 2; // default shape size
        Instance ni = new DenseInstance(1.0, vals);
        newInsts.add(ni);
      }

      // predicted class attribute is always actualClassIndex - 1
      Instances[] classes = new Instances[newInsts.numClasses()];
      for (int i = 0; i < newInsts.numClasses(); i++) {
        classes[i] = new Instances(newInsts, 0);
        classes[i].setRelationName(newInsts.classAttribute().value(i));
      }
      Instances errors = new Instances(newInsts, 0);
      int actualClass = newInsts.classIndex();
      for (int i = 0; i < newInsts.numInstances(); i++) {
        Instance current = newInsts.instance(i);
        classes[(int) current.classValue()].add((Instance) current.copy());
        if (current.value(actualClass) != current.value(actualClass - 1)) {
          Instance toAdd = (Instance) current.copy();
          // larger shape for an error
          toAdd.setValue(toAdd.numAttributes() - 1, 5);
          // swap predicted and actual class value so
          // that the color plotted for the error series
          // is that of the predicted class
          double actualClassV = toAdd.value(actualClass);
          double predictedClassV = toAdd.value(actualClass - 1);
          toAdd.setValue(actualClass, predictedClassV);
          toAdd.setValue(actualClass - 1, actualClassV);
          errors.add(toAdd);
        }
      }
      errors.setRelationName("Errors");
      m_offscreenPlotData.add(errors);
      for (Instances classe : classes) {
        m_offscreenPlotData.add(classe);
      }
    } else {
      // numeric class - have to make a new set of instances
      // with the point sizes added as an additional attribute
      ArrayList<Attribute> atts = new ArrayList<Attribute>();
      for (int i = 0; i < predictedI.numAttributes(); i++) {
        atts.add((Attribute) predictedI.attribute(i).copy());
      }
      atts.add(new Attribute("@@size@@"));
      Instances newInsts =
        new Instances(predictedI.relationName(), atts,
          predictedI.numInstances());
      int[] shapeSizes = plotData.getShapeSize();
      for (int i = 0; i < predictedI.numInstances(); i++) {
        double[] vals = new double[newInsts.numAttributes()];
        for (int j = 0; j < predictedI.numAttributes(); j++) {
          vals[j] = predictedI.instance(i).value(j);
        }
        vals[vals.length - 1] = shapeSizes[i];
        Instance ni = new DenseInstance(1.0, vals);
        newInsts.add(ni);
      }
      newInsts.setRelationName(predictedI.classAttribute().name());
      m_offscreenPlotData.add(newInsts);
    }

    List<String> options = new ArrayList<String>();
    String additional =
      "-color=" + predictedI.classAttribute().name() + ",-hasErrors";
    if (m_additionalOptions != null && m_additionalOptions.length() > 0) {
      additional += "," + m_additionalOptions;
      additional = environmentSubstitute(additional);
    }
    for (String p : additional.split(",")) {
      options.add(p.trim());
    }
    options.add("-shapeSize=@@size@@");

    String xAxis = environmentSubstitute(m_xAxis);
    String yAxis = environmentSubstitute(m_yAxis);

    int defWidth =
      parsePlotDimension(environmentSubstitute(m_width), DEFAULT_PLOT_WIDTH);
    int defHeight =
      parsePlotDimension(environmentSubstitute(m_height), DEFAULT_PLOT_HEIGHT);

    try {
      return m_offscreenRenderer.renderXYScatterPlot(defWidth, defHeight,
        m_offscreenPlotData, xAxis, yAxis, options);
    } catch (Exception e1) {
      throw new WekaException(e1);
    }
  }

  /**
   * Process incoming data - routes to the error or threshold handler according
   * to the connection the data arrived on.
   *
   * @param data the data to process
   * @throws WekaException if a problem occurs
   */
  @Override
  public synchronized void processIncoming(Data data) throws WekaException {
    getStepManager().processing();
    PlotData2D errorD =
      (PlotData2D) data.getPayloadElement(StepManager.CON_VISUALIZABLE_ERROR);
    PlotData2D thresholdD =
      (PlotData2D) data.getPayloadElement(StepManager.CON_THRESHOLD_DATA);
    getStepManager().logDetailed(
      "Processing "
        + (errorD != null ? " error data " + errorD.getPlotName()
          : " threshold data " + thresholdD.getPlotName()));

    if (data.getConnectionName().equals(StepManager.CON_VISUALIZABLE_ERROR)) {
      processErrorData(errorD);
    } else if (data.getConnectionName().equals(StepManager.CON_THRESHOLD_DATA)) {
      processThresholdData(thresholdD);
    }
    getStepManager().finished();
  }

  /**
   * Handle data from a visualizable error connection. Error plots replace any
   * previously collected plots.
   *
   * @param errorD the error plot data
   * @throws WekaException if rendering fails
   */
  protected void processErrorData(PlotData2D errorD) throws WekaException {
    m_plots.clear();
    m_plots.add(errorD);
    m_dataIsThresholdData = false;

    if (getStepManager().numOutgoingConnectionsOfType(StepManager.CON_IMAGE) > 0) {
      // configure renderer if necessary
      setupOffscreenRenderer();
      m_offscreenPlotData = new ArrayList<Instances>();
      BufferedImage bi = addOffscreenErrorPlot(errorD);

      Data imageD = new Data(StepManager.CON_IMAGE);
      imageD.setPayloadElement(StepManager.CON_IMAGE, bi);
      getStepManager().outputData(StepManager.CON_IMAGE, imageD);
    }
  }

  /**
   * Handle data from a threshold data connection. Curves with the same
   * relation name accumulate as additional series; a new relation name starts
   * a fresh collection.
   *
   * @param thresholdD the threshold plot data
   * @throws WekaException if rendering fails
   */
  protected void processThresholdData(PlotData2D thresholdD)
    throws WekaException {
    if (m_plots.size() == 0) {
      m_plots.add(thresholdD);
    } else {
      // a different relation name signals the start of a new batch of
      // curves - discard the old ones
      if (!m_plots.get(0).getPlotInstances().relationName()
        .equals(thresholdD.getPlotInstances().relationName())) {
        m_plots.clear();
      }
      m_plots.add(thresholdD);
    }
    m_dataIsThresholdData = true;

    if (getStepManager().numOutgoingConnectionsOfType(StepManager.CON_IMAGE) > 0) {
      // configure renderer if necessary
      setupOffscreenRenderer();
      if (m_offscreenPlotData == null || m_offscreenPlotData.size() == 0
        || !m_offscreenPlotData.get(0).relationName()
          .equals(thresholdD.getPlotInstances().relationName())) {
        m_offscreenPlotData = new ArrayList<Instances>();
        m_thresholdSeriesTitles = new ArrayList<String>();
      }
      BufferedImage bi = addOffscreenThresholdPlot(thresholdD);

      Data imageD = new Data(StepManager.CON_IMAGE);
      imageD.setPayloadElement(StepManager.CON_IMAGE, bi);
      imageD.setPayloadElement(StepManager.CON_AUX_DATA_TEXT_TITLE,
        thresholdD.getPlotName());
      getStepManager().outputData(StepManager.CON_IMAGE, imageD);
    }
  }

  /**
   * When running in a graphical execution environment a step can make one or
   * more popup Viewer components available. The returned map is keyed by
   * action name and maps to the fully qualified name of the corresponding
   * interactive viewer. The chart viewer is only offered once at least one
   * plot has been collected.
   *
   * @return a map of viewer component names, or null if this step has no
   *         graphical views
   */
  @Override
  public Map<String, String> getInteractiveViewers() {
    Map<String, String> views = new LinkedHashMap<String, String>();

    if (m_plots.size() > 0) {
      views.put("Show chart",
        "weka.gui.knowledgeflow.steps.ModelPerformanceChartInteractiveView");
    }

    return views;
  }

  /**
   * Get the plots currently stored in this step
   *
   * @return the list of collected plots (first element is the master plot)
   */
  public List<PlotData2D> getPlots() {
    return m_plots;
  }

  /**
   * Returns true if the plots being stored are threshold plots
   *
   * @return true if the plots are threshold plots
   */
  public boolean isDataIsThresholdData() {
    return m_dataIsThresholdData;
  }

  /**
   * Clear all plot data (both onscreen and offscreen)
   */
  public void clearPlotData() {
    m_plots.clear();
    if (m_offscreenPlotData != null) {
      m_offscreenPlotData.clear();
    }
  }

  /**
   * Retrieve the data (plots) stored in this step. Returns a two element
   * array: the list of plots and a Boolean flag indicating whether they are
   * threshold plots.
   *
   * @return the data stored in this step
   */
  @Override
  public Object retrieveData() {
    Object[] onAndOffScreen = new Object[2];
    onAndOffScreen[0] = m_plots;
    onAndOffScreen[1] = m_dataIsThresholdData;

    return onAndOffScreen;
  }

  /**
   * Restore the data (plots) for this step, as previously produced by
   * {@link #retrieveData()}.
   *
   * @param data the data to set
   * @throws WekaException if the argument is not the expected two element
   *           array
   */
  @SuppressWarnings("unchecked")
  @Override
  public void restoreData(Object data) throws WekaException {
    if (!(data instanceof Object[])) {
      // message matches the actual format produced by retrieveData()
      throw new WekaException("Argument must be a two element array, "
        + "where the first element holds a list of PlotData2D objects and "
        + "the second a boolean - true if the data is threshold data");
    }

    m_plots = ((List<PlotData2D>) ((Object[]) data)[0]);
    m_dataIsThresholdData = ((Boolean) ((Object[]) data)[1]);
    m_offscreenPlotData = new ArrayList<Instances>();
  }

  /**
   * Initialize the step
   *
   * @throws WekaException if a problem occurs during initialization
   */
  @Override
  public void stepInit() throws WekaException {
    // nothing to do
  }

  /**
   * Return the fully qualified name of a custom editor component (JComponent)
   * to use for editing the properties of the step. This method can return null,
   * in which case the system will dynamically generate an editor using the
   * GenericObjectEditor
   *
   * @return the fully qualified name of a step editor component
   */
  @Override
  public String getCustomEditorForStep() {
    return "weka.gui.knowledgeflow.steps.ModelPerformanceChartStepEditorDialog";
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/NotPersistable.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* NotPersistable.java
* Copyright (C) 2016 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marker annotation identifying bean properties (getter/setter methods) whose
 * values should be excluded when a step's configuration is persisted.
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision: $
 */
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface NotPersistable {
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/Note.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* Note.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import java.util.ArrayList;
import java.util.List;
/**
* A Knowledge Flow "step" that implements a note on the GUI layout
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
public class Note extends BaseStep {

  /** Text displayed by this note */
  protected String m_noteText = "New note";

  /**
   * Initialize the step. A note carries no state, so there is nothing to set
   * up.
   */
  @Override
  public void stepInit() {
    // purely cosmetic step - no initialization required
  }

  /**
   * Set the text displayed by this note.
   *
   * @param text the new note text
   */
  public void setNoteText(String text) {
    m_noteText = text;
  }

  /**
   * Get the text displayed by this note.
   *
   * @return the note text
   */
  public String getNoteText() {
    return m_noteText;
  }

  /**
   * A note participates in no data flow, so it accepts no incoming
   * connections.
   *
   * @return an empty (mutable) list of connection types
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    return new ArrayList<>();
  }

  /**
   * A note participates in no data flow, so it produces no outgoing
   * connections.
   *
   * @return an empty (mutable) list of connection types
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    return new ArrayList<>();
  }

  /**
   * Return the fully qualified name of the custom editor used to edit this
   * note's text.
   *
   * @return the fully qualified name of a step editor component
   */
  @Override
  public String getCustomEditorForStep() {
    return "weka.gui.knowledgeflow.steps.NoteEditorDialog";
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/PairedDataHelper.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* PairedDataHelper.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.WekaException;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
/**
* <p>
* A helper class that Step implementations can use when processing paired data
* (e.g. train and test sets). Has the concept of a primary and secondary
* connection/data type, where the secondary connection/data for a given set
* number typically needs to be processed using a result generated from the
* corresponding primary connection/data. This class takes care of ensuring that
* the secondary connection/data is only processed once the primary has
* completed. Users of this helper need to provide an implementation of the
* PairedProcessor inner interface, where the processPrimary() method will be
* called to process the primary data/connection (and return a result), and
* processSecondary() called to deal with the secondary connection/data. The
* result of execution on a particular primary data set number can be retrieved
* by calling the getIndexedPrimaryResult() method, passing in the set number of
* the primary result to retrieve.
* </p>
*
* This class also provides an arbitrary storage mechanism for additional
* results beyond the primary type of result. It also takes care of invoking
* processing() and finished() on the client step's StepManager.<br>
* <br>
*
* <pre>
* public class MyFunkyStep extends BaseStep
* implements PairedDataHelper.PairedProcessor<MyFunkyMainResult> {
* ...
* protected PairedDataHelper<MyFunkyMainResult> m_helper;
* ...
* public void stepInit() {
* m_helper = new PairedDataHelper<MyFunkyMainResult>(this, this,
* StepManager.[CON_WHATEVER_YOUR_PRIMARY_CONNECTION_IS],
* StepManager.[CON_WHATEVER_YOUR_SECONDARY_CONNECTION_IS]);
*
* ...
* }
*
* public void processIncoming(Data data) throws WekaException {
* // delegate to our helper to handle primary/secondary synchronization
* // issues
* m_helper.process(data);
* }
*
* public MyFunkyMainResult processPrimary(Integer setNum, Integer maxSetNun,
* Data data, PairedDataHelper<MyFunkyMainResult> helper) throws WekaException {
* SomeDataTypeToProcess someData = data.getPrimaryPayload();
*
* MyFunkyMainResult processor = new MyFunkyMainResult();
* // do some processing using MyFunkyMainResult and SomeDataToProcess
* ...
* // output some data to downstream steps if necessary
* ...
*
* return processor;
* }
*
* public void processSecondary(Integer setNum, Integer maxSetNum, Data data,
* PairedDataHelper<MyFunkyMainResult> helper) throws WekaException {
* SomeDataTypeToProcess someData = data.getPrimaryPayload();
*
* // get the MyFunkyMainResult for this set number
* MyFunkyMainResult result = helper.getIndexedPrimaryResult(setNum);
*
* // do some stuff with the result and the secondary data
* ...
* // output some data to downstream steps if necessary
* }
* }
* </pre>
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
public class PairedDataHelper<P> implements java.io.Serializable {
/** For serialization */
private static final long serialVersionUID = -7813465607881227514L;
/**
* Storage of arbitrary indexed results computed during execution of
* PairedProcessor.processPrimary()
*/
protected Map<String, Map<Integer, Object>> m_namedIndexedStore =
new ConcurrentHashMap<String, Map<Integer, Object>>();
/** Storage of the indexed primary result */
protected Map<Integer, P> m_primaryResultMap =
new ConcurrentHashMap<Integer, P>();
/**
* Holds the secondary data objects, if they arrive before the corresponding
* primary has been computed
*/
protected Map<Integer, Data> m_secondaryDataMap =
new ConcurrentHashMap<Integer, Data>();
/** The type of connection to route to PairedProcessor.processPrimary() */
protected String m_primaryConType;
/** The type of connection to route to PairedProcessor.processSecondary() */
protected String m_secondaryConType;
/** The PairedProcessor implementation that will do the actual work */
protected transient PairedProcessor m_processor;
/** The step that owns this helper */
protected transient Step m_ownerStep;
/** Keep track of completed primary/secondary pairs */
protected transient AtomicInteger m_setCount;
/**
* Constructor
*
* @param owner the owner step
* @param processor the PairedProcessor implementation
* @param primaryConType the primary connection type
* @param secondaryConType the secondary connection type
*/
public PairedDataHelper(Step owner, PairedProcessor processor,
String primaryConType, String secondaryConType) {
m_primaryConType = primaryConType;
m_secondaryConType = secondaryConType;
m_ownerStep = owner;
m_processor = processor;
}
/**
* Initiate routing and processing for a particular data object
*
* @param data the data object to process
* @throws WekaException if a problem occurs
*/
public void process(Data data) throws WekaException {
if (m_ownerStep.getStepManager().isStopRequested()) {
m_ownerStep.getStepManager().interrupted();
return;
}
String connType = data.getConnectionName();
if (connType.equals(m_primaryConType)) {
processPrimary(data);
} else if (m_secondaryConType != null
&& connType.equals(m_secondaryConType)) {
processSecondary(data);
} else {
throw new WekaException("Illegal connection/data type: " + connType);
}
if (!m_ownerStep.getStepManager().isStopRequested()) {
if (m_setCount != null && m_setCount.get() == 0) {
m_ownerStep.getStepManager().finished();
// save memory
m_primaryResultMap.clear();
m_secondaryDataMap.clear();
m_namedIndexedStore.clear();
}
} else {
m_ownerStep.getStepManager().interrupted();
}
}
/**
* Handle the processing of the primary data/connection. Performs
* initialization in the case of receiving the first data object in a batch.
* Delegates actual processing work to the PairedProcessor.
*
* @param data the data to process
* @throws WekaException if a problem occurs
*/
@SuppressWarnings("unchecked")
private void processPrimary(Data data) throws WekaException {
Integer setNum =
data.getPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, 1);
Integer maxSetNum =
data.getPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM, 1);
if (m_setCount == null) {
m_setCount = new AtomicInteger(maxSetNum);
}
if (setNum == 1) {
m_ownerStep.getStepManager().processing();
m_ownerStep.getStepManager().statusMessage(
"Processing set/fold " + setNum + " out of " + maxSetNum);
}
if (!m_ownerStep.getStepManager().isStopRequested()) {
P result = (P) m_processor.processPrimary(setNum, maxSetNum, data, this);
if (result != null) {
m_primaryResultMap.put(setNum, result);
}
} else {
m_ownerStep.getStepManager().interrupted();
return;
}
Data waitingSecondary = m_secondaryDataMap.get(setNum);
if (waitingSecondary != null) {
processSecondary(waitingSecondary);
} else if (m_secondaryConType == null) {
// no secondary connection
m_setCount.decrementAndGet();
}
}
/**
* Handle processing of the secondary data/connection. Stores the secondary if
* there is no corresponding primary result generated yet. Delegates actual
* processing work to the PairedProcessor
*
* @param data the data to process
* @throws WekaException if a problem occurs
*/
@SuppressWarnings("unchecked")
private synchronized void processSecondary(Data data) throws WekaException {
Integer setNum =
data.getPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, 1);
Integer maxSetNum =
data.getPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM, 1);
P primaryData = m_primaryResultMap.get(setNum);
if (primaryData == null) {
// store, ready for the arrival of the matching primary data
m_secondaryDataMap.put(setNum, data);
return;
}
if (!m_ownerStep.getStepManager().isStopRequested()) {
m_processor.processSecondary(setNum, maxSetNum, data, this);
} else {
m_ownerStep.getStepManager().interrupted();
return;
}
m_setCount.decrementAndGet();
}
/**
* Retrieve the primary result corresponding to a given set number
*
* @param index the set number of the result to get
* @return the primary result
*/
public P getIndexedPrimaryResult(int index) {
return m_primaryResultMap.get(index);
}
/**
* Reset the helper. The helper must be reset between runs if it is being
* re-used (as opposed to a new helper instance being created).
*/
public void reset() {
// dont' reset if we're still processing!
if (m_setCount != null && m_setCount.get() > 0
&& !m_ownerStep.getStepManager().isStopRequested()) {
return;
}
m_setCount = null;
}
/**
* Return true if there is no further processing to be done
*
* @return true if processing is done
*/
public boolean isFinished() {
return m_setCount.get() == 0;
}
/**
* Create a indexed store with a given name
*
* @param name the name of the store to create
*/
public void createNamedIndexedStore(String name) {
m_namedIndexedStore.put(name, new ConcurrentHashMap<Integer, Object>());
}
/**
* Gets an indexed value from a named store
*
* @param storeName the name of the store to retrieve from
* @param index the index of the value to get
* @param <T> the type of the value
* @return the requested value or null if either the store does not exist or
* the value does not exist in the store.
*/
@SuppressWarnings("unchecked")
public <T> T getIndexedValueFromNamedStore(String storeName, Integer index) {
Map<Integer, Object> store = m_namedIndexedStore.get(storeName);
if (store != null) {
return (T) store.get(index);
}
return null;
}
/**
* Adds a value to a named store with the given index. Creates the named store
* if it doesn't already exist.
*
* @param storeName the name of the store to add to
* @param index the index to associate with the value
* @param value the value to store
*/
public synchronized void addIndexedValueToNamedStore(String storeName,
Integer index, Object value) {
Map<Integer, Object> store = m_namedIndexedStore.get(storeName);
if (store == null) {
createNamedIndexedStore(storeName);
store = m_namedIndexedStore.get(storeName);
}
store.put(index, value);
}
/**
* Interface for processors of paired data to implement. See the description
* in the class documentation of PairedDataHelper.
*/
  public interface PairedProcessor<P> {

    /**
     * Process the primary data for a given set number and return the primary
     * result that later secondary processing will be paired with.
     *
     * @param setNum the set number of this data
     * @param maxSetNum the maximum set number
     * @param data the primary data to process
     * @param helper the helper managing the pairing
     * @return the primary result
     * @throws WekaException if a problem occurs
     */
    P processPrimary(Integer setNum, Integer maxSetNum, Data data,
      PairedDataHelper<P> helper) throws WekaException;

    /**
     * Process the secondary data for a given set number. The matching primary
     * result is available from the helper when this is invoked.
     *
     * @param setNum the set number of this data
     * @param maxSetNum the maximum set number
     * @param data the secondary data to process
     * @param helper the helper managing the pairing
     * @throws WekaException if a problem occurs
     */
    void processSecondary(Integer setNum, Integer maxSetNum, Data data,
      PairedDataHelper<P> helper) throws WekaException;
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/PredictionAppender.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* PredictionAppender.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.classifiers.UpdateableClassifier;
import weka.classifiers.misc.InputMappedClassifier;
import weka.clusterers.DensityBasedClusterer;
import weka.core.*;
import weka.filters.unsupervised.attribute.Add;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import weka.knowledgeflow.StepManagerImpl;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* Step that can produce data with predictions appended from batch or
* incremental classifiers and clusterers
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(
name = "PredictionAppender",
category = "Evaluation",
toolTipText = "Append predictions from classifiers or clusterers to incoming data ",
iconPath = KFGUIConsts.BASE_ICON_PATH + "PredictionAppender.gif")
public class PredictionAppender extends BaseStep {
private static final long serialVersionUID = 3558618759400903936L;
/** True if probabilities are to be appended */
protected boolean m_appendProbabilities;
/** Holds structure of streaming output */
protected Instances m_streamingOutputStructure;
/** Re-usable Data object for streaming output */
protected Data m_instanceData = new Data(StepManager.CON_INSTANCE);
/** Keep track of indexes of string attributes in the streaming case */
protected List<Integer> m_stringAttIndexes;
/**
* Initialize the step
*
* @throws WekaException if a problem occurs
*/
@Override
public void stepInit() throws WekaException {
m_streamingOutputStructure = null;
}
/**
* Get the incoming connection types that this step accepts
*
* @return a list of acceptable incoming connection types
*/
@Override
public List<String> getIncomingConnectionTypes() {
if (getStepManager().numIncomingConnections() == 0) {
return Arrays
.asList(StepManager.CON_BATCH_CLASSIFIER,
StepManager.CON_INCREMENTAL_CLASSIFIER,
StepManager.CON_BATCH_CLUSTERER);
}
return new ArrayList<String>();
}
/**
* Get a list of outgoing connection types that this step can produce at this
* time
*
* @return a list of outgoing connection types
*/
@Override
public List<String> getOutgoingConnectionTypes() {
List<String> result = new ArrayList<String>();
if (getStepManager().numIncomingConnectionsOfType(
StepManager.CON_BATCH_CLASSIFIER) > 0
|| getStepManager().numIncomingConnectionsOfType(
StepManager.CON_BATCH_CLUSTERER) > 0) {
result.add(StepManager.CON_TRAININGSET);
result.add(StepManager.CON_TESTSET);
} else if (getStepManager().numIncomingConnectionsOfType(
StepManager.CON_INCREMENTAL_CLASSIFIER) > 0) {
result.add(StepManager.CON_INSTANCE);
}
return result;
}
/**
* Process incoming data
*
* @param data the Data object to process
* @throws WekaException if a problem occurs
*/
@Override
public void processIncoming(Data data) throws WekaException {
Instances trainingData =
(Instances) data.getPayloadElement(StepManager.CON_AUX_DATA_TRAININGSET);
Instances testData =
(Instances) data.getPayloadElement(StepManager.CON_AUX_DATA_TESTSET);
Instance streamInstance =
(Instance) data.getPayloadElement(StepManager.CON_AUX_DATA_TEST_INSTANCE);
if (getStepManager().numIncomingConnectionsOfType(
StepManager.CON_BATCH_CLASSIFIER) > 0) {
processBatchClassifierCase(data, trainingData, testData);
} else if (getStepManager().numIncomingConnectionsOfType(
StepManager.CON_INCREMENTAL_CLASSIFIER) > 0) {
processIncrementalClassifier(data, streamInstance);
} else if (getStepManager().numIncomingConnectionsOfType(
StepManagerImpl.CON_BATCH_CLUSTERER) > 0) {
processBatchClustererCase(data, trainingData, testData);
}
}
/**
* Process an incremental classifier
*
* @param data the Data object to process
* @param inst the instance to process
* @throws WekaException if a problem occurs
*/
protected void processIncrementalClassifier(Data data, Instance inst)
throws WekaException {
if (isStopRequested()) {
return;
}
if (getStepManager().isStreamFinished(data)) {
// done
// notify downstream steps of end of stream
Data d = new Data(StepManager.CON_INSTANCE);
getStepManager().throughputFinished(d);
return;
}
getStepManager().throughputUpdateStart();
boolean labelOrNumeric =
!m_appendProbabilities || inst.classAttribute().isNumeric();
weka.classifiers.Classifier classifier =
(weka.classifiers.Classifier) data
.getPayloadElement(StepManager.CON_INCREMENTAL_CLASSIFIER);
if (m_streamingOutputStructure == null) {
// start of stream
if (classifier == null) {
throw new WekaException("No classifier in incoming data object!");
}
if (!(classifier instanceof UpdateableClassifier)) {
throw new WekaException("Classifier in data object is not "
+ "an UpdateableClassifier!");
}
m_stringAttIndexes = new ArrayList<Integer>();
for (int i = 0; i < inst.numAttributes(); i++) {
if (inst.attribute(i).isString()) {
m_stringAttIndexes.add(i);
}
}
try {
m_streamingOutputStructure =
makeOutputDataClassifier(inst.dataset(), classifier, !labelOrNumeric,
"_with_predictions");
} catch (Exception ex) {
throw new WekaException(ex);
}
}
double[] instanceVals =
new double[m_streamingOutputStructure.numAttributes()];
Instance newInstance = null;
for (int i = 0; i < inst.numAttributes(); i++) {
instanceVals[i] = inst.value(i);
}
if (!m_appendProbabilities || inst.classAttribute().isNumeric()) {
try {
double predClass = classifier.classifyInstance(inst);
instanceVals[instanceVals.length - 1] = predClass;
} catch (Exception ex) {
throw new WekaException(ex);
}
} else if (m_appendProbabilities) {
try {
double[] preds = classifier.distributionForInstance(inst);
int index = 0;
for (int i = instanceVals.length - inst.classAttribute().numValues(); i < instanceVals.length; i++) {
instanceVals[i] = preds[index++];
}
} catch (Exception ex) {
throw new WekaException(ex);
}
}
Instance newInst = new DenseInstance(inst.weight(), instanceVals);
newInst.setDataset(m_streamingOutputStructure);
// check for string attributes
if (m_stringAttIndexes != null) {
for (int i = 0; i < m_stringAttIndexes.size(); i++) {
int index = m_stringAttIndexes.get(i);
m_streamingOutputStructure.attribute(index).setStringValue(
inst.stringValue(index));
}
}
m_instanceData.setPayloadElement(StepManagerImpl.CON_INSTANCE, newInst);
if (isStopRequested()) {
return;
}
getStepManager().throughputUpdateEnd();
getStepManager().outputData(m_instanceData.getConnectionName(),
m_instanceData);
}
/**
 * Process a batch clusterer
*
* @param data the Data object to process
* @param trainingData the training data (can be null)
* @param testData the test data (can be null)
* @throws WekaException if a problem occurs
*/
protected void processBatchClustererCase(Data data, Instances trainingData,
Instances testData) throws WekaException {
if (isStopRequested()) {
getStepManager().interrupted();
return;
}
weka.clusterers.Clusterer clusterer =
(weka.clusterers.Clusterer) data
.getPayloadElement(StepManager.CON_BATCH_CLUSTERER);
int setNum =
(Integer) data.getPayloadElement(StepManager.CON_AUX_DATA_SET_NUM);
int maxSetNum =
(Integer) data.getPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM);
String relationNameModifier = "_set_" + setNum + "_of_" + maxSetNum;
if (m_appendProbabilities && !(clusterer instanceof DensityBasedClusterer)) {
throw new WekaException(
"Only DensityBasedClusterers can append probabilities.");
}
try {
getStepManager().processing();
boolean clusterLabel =
!m_appendProbabilities || !(clusterer instanceof DensityBasedClusterer);
Instances newTrainInstances =
trainingData != null ? makeOutputDataClusterer(trainingData, clusterer,
!clusterLabel, relationNameModifier) : null;
Instances newTestInstances =
testData != null ? makeOutputDataClusterer(testData, clusterer,
!clusterLabel, relationNameModifier) : null;
if (newTrainInstances != null
&& getStepManager().numOutgoingConnectionsOfType(
StepManager.CON_TRAININGSET) > 0) {
for (int i = 0; i < newTrainInstances.numInstances(); i++) {
if (clusterLabel) {
predictLabelClusterer(clusterer, newTrainInstances.instance(i),
trainingData.instance(i));
} else {
predictProbabilitiesClusterer((DensityBasedClusterer) clusterer,
newTrainInstances.instance(i), trainingData.instance(i));
}
}
if (isStopRequested()) {
getStepManager().interrupted();
return;
}
Data outTrain = new Data(StepManager.CON_TRAININGSET);
outTrain.setPayloadElement(StepManager.CON_TRAININGSET,
newTrainInstances);
outTrain.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, setNum);
outTrain.setPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM,
maxSetNum);
getStepManager().outputData(outTrain);
}
if (newTestInstances != null
&& (getStepManager().numOutgoingConnectionsOfType(
StepManager.CON_TESTSET) > 0 || getStepManager()
.numOutgoingConnectionsOfType(StepManager.CON_DATASET) > 0)) {
for (int i = 0; i < newTestInstances.numInstances(); i++) {
if (clusterLabel) {
predictLabelClusterer(clusterer, newTestInstances.instance(i),
testData.instance(i));
} else {
predictProbabilitiesClusterer((DensityBasedClusterer) clusterer,
newTestInstances.instance(i), testData.instance(i));
}
}
if (isStopRequested()) {
getStepManager().interrupted();
return;
}
if (getStepManager().numOutgoingConnectionsOfType(
StepManager.CON_TESTSET) > 0) {
Data outTest = new Data(StepManager.CON_TESTSET);
outTest.setPayloadElement(StepManager.CON_TESTSET, newTestInstances);
outTest.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, setNum);
outTest.setPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM,
maxSetNum);
getStepManager().outputData(outTest);
}
if (getStepManager().numIncomingConnectionsOfType(
StepManager.CON_DATASET) > 0) {
Data outData = new Data(StepManager.CON_DATASET);
outData.setPayloadElement(StepManager.CON_DATASET, newTestInstances);
outData.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, setNum);
outData.setPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM,
maxSetNum);
getStepManager().outputData(outData);
}
}
getStepManager().finished();
} catch (Exception ex) {
throw new WekaException(ex);
}
}
/**
* Process a batch classifier
*
* @param data the Data object to process
* @param trainingData the training data (can be null)
* @param testData the test data (can be null)
* @throws WekaException if a problem occurs
*/
protected void processBatchClassifierCase(Data data, Instances trainingData,
Instances testData) throws WekaException {
if (isStopRequested()) {
getStepManager().interrupted();
return;
}
weka.classifiers.Classifier classifier =
(weka.classifiers.Classifier) data
.getPayloadElement(StepManager.CON_BATCH_CLASSIFIER);
int setNum =
(Integer) data.getPayloadElement(StepManager.CON_AUX_DATA_SET_NUM);
int maxSetNum =
(Integer) data.getPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM);
String relationNameModifier = "_set_" + setNum + "_of_" + maxSetNum;
boolean classNumeric =
trainingData != null ? trainingData.classAttribute().isNumeric()
: testData.classAttribute().isNumeric();
boolean labelOrNumeric = !m_appendProbabilities || classNumeric;
try {
getStepManager().processing();
Instances newTrainInstances =
trainingData != null ? makeOutputDataClassifier(trainingData,
classifier, !labelOrNumeric, relationNameModifier) : null;
Instances newTestInstances =
testData != null ? makeOutputDataClassifier(testData, classifier,
!labelOrNumeric, relationNameModifier) : null;
if (newTrainInstances != null
&& getStepManager().numOutgoingConnectionsOfType(
StepManager.CON_TRAININGSET) > 0) {
for (int i = 0; i < newTrainInstances.numInstances(); i++) {
if (labelOrNumeric) {
predictLabelClassifier(classifier, newTrainInstances.instance(i),
trainingData.instance(i));
} else {
predictProbabilitiesClassifier(classifier,
newTrainInstances.instance(i), trainingData.instance(i));
}
}
if (isStopRequested()) {
getStepManager().interrupted();
return;
}
Data outTrain = new Data(StepManager.CON_TRAININGSET);
outTrain.setPayloadElement(StepManager.CON_TRAININGSET,
newTrainInstances);
outTrain.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, setNum);
outTrain.setPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM,
maxSetNum);
getStepManager().outputData(outTrain);
}
if (newTestInstances != null
&& (getStepManager().numOutgoingConnectionsOfType(
StepManager.CON_TESTSET) > 0 || getStepManager()
.numOutgoingConnectionsOfType(StepManager.CON_DATASET) > 0)) {
for (int i = 0; i < newTestInstances.numInstances(); i++) {
if (labelOrNumeric) {
predictLabelClassifier(classifier, newTestInstances.instance(i),
testData.instance(i));
} else {
predictProbabilitiesClassifier(classifier,
newTestInstances.instance(i), testData.instance(i));
}
}
if (isStopRequested()) {
getStepManager().interrupted();
return;
}
if (getStepManager().numOutgoingConnectionsOfType(
StepManager.CON_TESTSET) > 0) {
Data outTest = new Data(StepManager.CON_TESTSET);
outTest.setPayloadElement(StepManager.CON_TESTSET, newTestInstances);
outTest.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, setNum);
outTest.setPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM,
maxSetNum);
getStepManager().outputData(outTest);
}
if (getStepManager().numIncomingConnectionsOfType(
StepManager.CON_DATASET) > 0) {
Data outData = new Data(StepManager.CON_DATASET);
outData.setPayloadElement(StepManager.CON_DATASET, newTestInstances);
outData.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, setNum);
outData.setPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM,
maxSetNum);
getStepManager().outputData(outData);
}
}
getStepManager().finished();
} catch (Exception ex) {
throw new WekaException(ex);
}
}
/**
* Add a cluster label to an instance using a clusterer
*
* @param clusterer the clusterer to use
* @param inst the instance to append a prediction to
* @param instOrig the original instance
* @throws WekaException if a problem occurs
*/
protected void predictLabelClusterer(weka.clusterers.Clusterer clusterer,
Instance inst, Instance instOrig) throws WekaException {
try {
int cluster = clusterer.clusterInstance(instOrig);
inst.setValue(inst.numAttributes() - 1, (double) cluster);
} catch (Exception ex) {
throw new WekaException(ex);
}
}
/**
* Add a distribution over cluster labels to an instance using a
* DensityBasedClusterer
*
* @param clusterer the clusterer to use
* @param inst the instance to append a prediction to
* @param instOrig the original instance
* @throws WekaException if a problem occurs
*/
protected void predictProbabilitiesClusterer(DensityBasedClusterer clusterer,
Instance inst, Instance instOrig) throws WekaException {
try {
double[] preds = clusterer.distributionForInstance(instOrig);
for (int i = 0; i < preds.length; i++) {
inst.setValue(inst.numAttributes() - preds.length + i, preds[i]);
}
} catch (Exception ex) {
throw new WekaException(ex);
}
}
/**
* Add a label to an instance using a classifier
*
* @param classifier the classifier to use
* @param inst the instance to append prediction to
* @param instOrig the original instance
* @throws WekaException if a problem occurs
*/
protected void predictLabelClassifier(weka.classifiers.Classifier classifier,
Instance inst, Instance instOrig) throws WekaException {
try {
double pred = classifier.classifyInstance(instOrig);
inst.setValue(inst.numAttributes() - 1, pred);
} catch (Exception ex) {
throw new WekaException(ex);
}
}
/**
* Add a distribution over class labels to an instance using a classifier
*
* @param classifier the classifier to use
* @param inst the instance to append prediction to
* @param instOrig the original instance
* @throws WekaException if a problem occurs
*/
protected void predictProbabilitiesClassifier(
weka.classifiers.Classifier classifier, Instance inst, Instance instOrig)
throws WekaException {
try {
double[] preds = classifier.distributionForInstance(instOrig);
for (int i = 0; i < preds.length; i++) {
inst.setValue(inst.numAttributes() - preds.length + i, preds[i]);
}
} catch (Exception ex) {
throw new WekaException(ex);
}
}
/**
* Make an output dataset for a clusterer. Either a single attribute is added
* for holding cluster labels, or a series of attributes are added in order to
* hold predicted cluster distribution
*
* @param inputData the incoming data
* @param clusterer the clusterer
* @param distribution true if a distribution over cluster labels will be
* predicted
* @param relationNameModifier modifier to add to the incoming relation name
* @return the output dataset
* @throws Exception if a problem occurs
*/
protected Instances makeOutputDataClusterer(Instances inputData,
weka.clusterers.Clusterer clusterer, boolean distribution,
String relationNameModifier) throws Exception {
String clustererName = clusterer.getClass().getName();
clustererName =
clustererName.substring(clustererName.lastIndexOf('.') + 1,
clustererName.length());
Instances newData = new Instances(inputData);
if (distribution) {
for (int i = 0; i < clusterer.numberOfClusters(); i++) {
Add addF = new Add();
addF.setAttributeIndex("last");
addF.setAttributeName("prob_cluster" + i);
addF.setInputFormat(newData);
newData = weka.filters.Filter.useFilter(newData, addF);
}
} else {
Add addF = new Add();
addF.setAttributeIndex("last");
addF.setAttributeName("assigned_cluster: " + clustererName);
String clusterLabels = "0";
for (int i = 1; i <= clusterer.numberOfClusters() - 1; i++) {
clusterLabels += "," + i;
}
addF.setNominalLabels(clusterLabels);
addF.setInputFormat(newData);
newData = weka.filters.Filter.useFilter(newData, addF);
}
newData.setRelationName(inputData.relationName() + relationNameModifier);
return newData;
}
/**
* Make an output dataset for a classifier. Either a single attribute is added
* for holding class labels, or a series of attributes are added in order to
* hold predicted class distribution
*
* @param inputData the incoming data
* @param classifier the classifier
* @param distribution true if a distribution over class labels will be
* predicted
* @param relationNameModifier modifier to add to the incoming relation name
* @return the output dataset
* @throws Exception if a problem occurs
*/
protected Instances makeOutputDataClassifier(Instances inputData,
weka.classifiers.Classifier classifier, boolean distribution,
String relationNameModifier) throws Exception {
// get class attribute from InputMappedClassifier (if necessary)
Attribute classAttribute = inputData.classAttribute();
if (classifier instanceof weka.classifiers.misc.InputMappedClassifier) {
classAttribute = ((InputMappedClassifier) classifier).getModelHeader(new Instances(inputData, 0)).classAttribute();
}
String classifierName = classifier.getClass().getName();
classifierName =
classifierName.substring(classifierName.lastIndexOf('.') + 1,
classifierName.length());
Instances newData = new Instances(inputData);
if (distribution) {
for (int i = 0; i < classAttribute.numValues(); i++) {
Add addF = new Add();
addF.setAttributeIndex("last");
addF.setAttributeName(classifierName + "_prob_"
+ classAttribute.value(i));
addF.setInputFormat(newData);
newData = weka.filters.Filter.useFilter(newData, addF);
}
} else {
Add addF = new Add();
addF.setAttributeIndex("last");
addF.setAttributeName("class_predicted_by: " + classifierName);
if (classAttribute.isNominal()) {
String classLabels = classAttribute.value(0);
for (int i = 1; i < classAttribute.numValues(); i++) {
classLabels += "," + classAttribute.value(i);
}
addF.setNominalLabels(classLabels);
}
addF.setInputFormat(inputData);
newData = weka.filters.Filter.useFilter(inputData, addF);
}
newData.setRelationName(inputData.relationName() + relationNameModifier);
return newData;
}
/**
* Set whether to append probability distributions rather than predicted
* classes
*
* @param append true to append probability distributions
*/
  public void setAppendProbabilities(boolean append) {
    // read by both the batch and incremental processing paths
    m_appendProbabilities = append;
  }
/**
* Get whether to append probability distributions rather than predicted
* classes
*
* @return true if probability distributions are to be appended
*/
  @OptionMetadata(displayName = "Append probabilities",
    description = "Append probabilities")
  public boolean getAppendProbabilities() {
    // exposed to the GUI via @OptionMetadata
    return m_appendProbabilities;
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/Saver.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* Saver.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import weka.core.EnvironmentHandler;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.SerializedObject;
import weka.core.Utils;
import weka.core.WekaException;
import weka.core.converters.DatabaseConverter;
import weka.core.converters.DatabaseSaver;
import weka.gui.ProgrammaticProperty;
import weka.gui.knowledgeflow.StepVisual;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import weka.knowledgeflow.StepManagerImpl;
/**
* Step that wraps weka.core.converters.Saver classes
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(name = "Saver", category = "DataSinks", toolTipText = "Weka saver wrapper", iconPath = "")
public class Saver extends WekaAlgorithmWrapper implements Serializable {
private static final long serialVersionUID = 6831606284211403465L;
/**
* Holds the structure
*/
protected Instances m_structure;
/** The actual saver instance to use */
protected weka.core.converters.Saver m_saver;
/** True if the saver is a DatabaseSaver */
protected boolean m_isDBSaver;
/**
* For file-based savers - if true (default), relation name is used as the
* primary part of the filename. If false, then the prefix is used as the
* filename. Useful for preventing filenames from getting too long when there
* are many filters in a flow.
*/
private boolean m_relationNameForFilename = true;
/**
* Get the class of the wrapped algorithm
*
* @return the class of the wrapped algorithm
*/
@Override
public Class getWrappedAlgorithmClass() {
return weka.core.converters.Saver.class;
}
/**
* Set the actual wrapped algorithm instance
*
* @param algo the wrapped algorithm instance
*/
@Override
public void setWrappedAlgorithm(final Object algo) {
super.setWrappedAlgorithm(algo);
this.m_defaultIconPath = StepVisual.BASE_ICON_PATH + "DefaultDataSink.gif";
}
/**
* Get the saver instance that is wrapped by this step. Convenience method
* that delegates to {@code getWrappedAlgorithm()}
*
* @return the saver instance that is wrapped by this step
*/
public weka.core.converters.Saver getSaver() {
return (weka.core.converters.Saver) this.getWrappedAlgorithm();
}
/**
* Set the saver instance that is wrapped by this step. Convenience method
* that delegates to {@code setWrappedAlgorithm()}.
*
* @param saver the saver instance that is wrapped by this step
*/
  @ProgrammaticProperty
  public void setSaver(final weka.core.converters.Saver saver) {
    // @ProgrammaticProperty keeps this setter off the GUI property sheet
    this.setWrappedAlgorithm(saver);
  }
/**
* Get whether the relation name is the primary part of the filename.
*
* @return true if the relation name is part of the filename.
*/
public boolean getRelationNameForFilename() {
return this.m_relationNameForFilename;
}
/**
* Set whether to use the relation name as the primary part of the filename.
* If false, then the prefix becomes the filename.
*
* @param r true if the relation name is to be part of the filename.
*/
public void setRelationNameForFilename(final boolean r) {
this.m_relationNameForFilename = r;
}
/**
* Initialize the step
*
* @throws WekaException if a problem occurs during initialization
*/
@Override
public void stepInit() throws WekaException {
this.m_saver = null;
if (!(this.getWrappedAlgorithm() instanceof weka.core.converters.Saver)) {
throw new WekaException("Incorrect type of algorithm");
}
if (this.getWrappedAlgorithm() instanceof DatabaseConverter) {
this.m_isDBSaver = true;
}
int numNonInstanceInputs = this.getStepManager().numIncomingConnectionsOfType(StepManager.CON_DATASET) + this.getStepManager().numIncomingConnectionsOfType(StepManager.CON_TRAININGSET)
+ this.getStepManager().numIncomingConnectionsOfType(StepManager.CON_TESTSET);
int numInstanceInput = this.getStepManager().numIncomingConnectionsOfType(StepManager.CON_INSTANCE);
if (numNonInstanceInputs > 0 && numInstanceInput > 0) {
WekaException cause = new WekaException("Can't have both instance and batch-based incomming connections!");
cause.fillInStackTrace();
this.getStepManager().logError(cause.getMessage(), cause);
throw new WekaException(cause);
}
}
/**
* Save a batch of instances
*
* @param data the {@code Instances} to save
* @param setNum the set/fold number of this batch
* @param maxSetNum the maximum number of sets/folds in this batch
* @param connectionName the connection type that this batch arrived in
* @throws WekaException if a problem occurs
*/
  protected void saveBatch(final Instances data, final Integer setNum, final Integer maxSetNum, final String connectionName) throws WekaException {
    this.getStepManager().processing();
    try {
      // work on a deep copy of the configured saver so that repeated /
      // concurrent batches don't share converter state
      weka.core.converters.Saver saver = (weka.core.converters.Saver) new SerializedObject(this.m_saver).getObject();
      if (this.m_saver instanceof EnvironmentHandler) {
        ((EnvironmentHandler) saver).setEnvironment(this.getStepManager().getExecutionEnvironment().getEnvironmentVariables());
      }
      String fileName = this.sanitizeFilename(data.relationName());
      // only append a "_<conn>_<set>_of_<max>" suffix when there is more
      // than one set/fold (setNum + maxSetNum == 2 means 1 of 1)
      String additional = setNum != null && (setNum + maxSetNum != 2) ? "_" + connectionName + "_" + setNum + "_of_" + maxSetNum : "";
      if (!this.m_isDBSaver) {
        saver.setDirAndPrefix(fileName, additional);
      } else {
        // database saver: optionally derive the table name from the
        // relation name, then strip any previous set/fold suffix before
        // appending the current one
        if (((DatabaseSaver) saver).getRelationForTableName()) {
          ((DatabaseSaver) saver).setTableName(fileName);
        }
        ((DatabaseSaver) saver).setRelationForTableName(false);
        String setName = ((DatabaseSaver) saver).getTableName();
        setName = setName.replaceFirst("_" + connectionName + "_[0-9]+_of_[0-9]+", "");
        ((DatabaseSaver) saver).setTableName(setName + additional);
      }
      saver.setInstances(data);
      this.getStepManager().logBasic("Saving " + data.relationName() + additional);
      this.getStepManager().statusMessage("Saving " + data.relationName() + additional);
      saver.writeBatch();
      if (!this.isStopRequested()) {
        this.getStepManager().logBasic("Save successful");
        this.getStepManager().statusMessage("Finished.");
      } else {
        this.getStepManager().interrupted();
      }
    } catch (Exception ex) {
      WekaException e = new WekaException(ex);
      // e.printStackTrace();
      throw e;
    } finally {
      // always mark the step finished, even on error/stop
      this.getStepManager().finished();
    }
  }
/**
* Processes incoming data
*
* @param data the data process
* @throws WekaException if a problem occurs
*/
@Override
public synchronized void processIncoming(final Data data) throws WekaException {
if (this.m_saver == null) {
try {
this.m_saver = (weka.core.converters.Saver) new SerializedObject(this.getWrappedAlgorithm()).getObject();
} catch (Exception ex) {
throw new WekaException(ex);
}
if (this.m_saver instanceof EnvironmentHandler) {
((EnvironmentHandler) this.m_saver).setEnvironment(this.getStepManager().getExecutionEnvironment().getEnvironmentVariables());
}
if (data.getConnectionName().equalsIgnoreCase(StepManager.CON_INSTANCE)) {
// incremental saving
Instance forStructure = (Instance) data.getPayloadElement(StepManager.CON_INSTANCE);
if (forStructure != null) {
// processing();
this.m_saver.setRetrieval(weka.core.converters.Saver.INCREMENTAL);
String fileName = this.sanitizeFilename(forStructure.dataset().relationName());
try {
this.m_saver.setDirAndPrefix(fileName, "");
} catch (Exception ex) {
throw new WekaException(ex);
}
try {
this.m_saver.setInstances(forStructure.dataset());
} catch (InterruptedException e) {
e.printStackTrace();
}
if (this.m_isDBSaver) {
if (((DatabaseSaver) this.m_saver).getRelationForTableName()) {
((DatabaseSaver) this.m_saver).setTableName(fileName);
((DatabaseSaver) this.m_saver).setRelationForTableName(false);
}
}
}
}
}
if (data.getConnectionName().equals(StepManager.CON_DATASET) || data.getConnectionName().equals(StepManager.CON_TRAININGSET) || data.getConnectionName().equals(StepManager.CON_TESTSET)) {
this.m_saver.setRetrieval(weka.core.converters.Saver.BATCH);
Instances theData = (Instances) data.getPayloadElement(data.getConnectionName());
Integer setNum = (Integer) data.getPayloadElement(StepManager.CON_AUX_DATA_SET_NUM);
Integer maxSetNum = (Integer) data.getPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM);
this.saveBatch(theData, setNum, maxSetNum, data.getConnectionName());
return;
}
Instance toSave = (Instance) data.getPayloadElement(StepManager.CON_INSTANCE);
boolean streamEnd = this.getStepManager().isStreamFinished(data);
try {
if (streamEnd) {
this.m_saver.writeIncremental(null);
this.getStepManager().throughputFinished(new Data(StepManagerImpl.CON_INSTANCE));
return;
}
if (!this.isStopRequested()) {
this.getStepManager().throughputUpdateStart();
this.m_saver.writeIncremental(toSave);
} else {
// make sure that saver finishes and closes file
this.m_saver.writeIncremental(null);
}
this.getStepManager().throughputUpdateEnd();
} catch (Exception ex) {
throw new WekaException(ex);
}
}
/**
* Get a list of incoming connection types that this step can receive at this
* time
*
* @return a list of incoming connection types
*/
@Override
public List<String> getIncomingConnectionTypes() {
int numInstance = this.getStepManager().getIncomingConnectedStepsOfConnectionType(StepManager.CON_INSTANCE).size();
int numNonInstance = this.getStepManager().getIncomingConnectedStepsOfConnectionType(StepManager.CON_DATASET).size() + this.getStepManager().getIncomingConnectedStepsOfConnectionType(StepManager.CON_TRAININGSET).size()
+ this.getStepManager().getIncomingConnectedStepsOfConnectionType(StepManager.CON_TESTSET).size();
if (numInstance + numNonInstance == 0) {
return Arrays.asList(StepManager.CON_DATASET, StepManager.CON_TRAININGSET, StepManager.CON_TESTSET, StepManager.CON_INSTANCE);
}
return new ArrayList<String>();
}
/**
* Get a list of outgoing connection types that this step can produce at this
* time
*
* @return a list of outgoing connection types
*/
@Override
public List<String> getOutgoingConnectionTypes() {
// no outgoing connections
return new ArrayList<String>();
}
/**
* makes sure that the filename is valid, i.e., replaces slashes, backslashes
* and colons with underscores ("_"). Also try to prevent filename from
* becoming insanely long by removing package part of class names.
*
* @param filename the filename to cleanse
* @return the cleansed filename
*/
protected String sanitizeFilename(String filename) {
filename = filename.replaceAll("\\\\", "_").replaceAll(":", "_").replaceAll("/", "_");
filename = Utils.removeSubstring(filename, "weka.filters.supervised.instance.");
filename = Utils.removeSubstring(filename, "weka.filters.supervised.attribute.");
filename = Utils.removeSubstring(filename, "weka.filters.unsupervised.instance.");
filename = Utils.removeSubstring(filename, "weka.filters.unsupervised.attribute.");
filename = Utils.removeSubstring(filename, "weka.clusterers.");
filename = Utils.removeSubstring(filename, "weka.associations.");
filename = Utils.removeSubstring(filename, "weka.attributeSelection.");
filename = Utils.removeSubstring(filename, "weka.estimators.");
filename = Utils.removeSubstring(filename, "weka.datagenerators.");
if (!this.m_isDBSaver && !this.m_relationNameForFilename) {
filename = "";
try {
if (this.m_saver.filePrefix().equals("")) {
this.m_saver.setFilePrefix("no-name");
}
} catch (Exception ex) {
ex.printStackTrace();
}
}
return filename;
}
  /**
   * Return the fully qualified name of a custom editor component (JComponent)
   * to use for editing the properties of the step. This method can return null,
   * in which case the system will dynamically generate an editor using the
   * GenericObjectEditor
   *
   * @return the fully qualified name of a step editor component
   */
  @Override
  public String getCustomEditorForStep() {
    // dedicated dialog for configuring the wrapped saver
    return "weka.gui.knowledgeflow.steps.SaverStepEditorDialog";
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/ScatterPlotMatrix.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* ScatterPlotMatrix.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.gui.knowledgeflow.KFGUIConsts;
import java.util.LinkedHashMap;
import java.util.Map;
/**
 * Step that collects data for display in a scatter plot matrix.
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision: $
 */
@KFStep(name = "ScatterPlotMatrix", category = "Visualization",
  toolTipText = "Visualize datasets in a scatter plot matrix",
  iconPath = KFGUIConsts.BASE_ICON_PATH + "ScatterPlotMatrix.gif")
public class ScatterPlotMatrix extends BaseSimpleDataVisualizer {

  private static final long serialVersionUID = -2033576643553187310L;

  /**
   * Returns the popup viewer components this step makes available when running
   * in a graphical execution environment. The returned map is keyed by action
   * name (e.g. "Show plots") and valued with the fully qualified class name of
   * the corresponding interactive viewer implementation. A viewer entry is
   * only offered once some data has actually been collected - before execution
   * (or with no data received) the map is empty.
   *
   * @return a map of viewer component names (possibly empty)
   */
  @Override
  public Map<String, String> getInteractiveViewers() {
    Map<String, String> viewerMap = new LinkedHashMap<String, String>();
    if (!m_data.isEmpty()) {
      viewerMap.put("Show plots",
        "weka.gui.knowledgeflow.steps.ScatterPlotMatrixInteractiveView");
    }
    return viewerMap;
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/SendToPerspective.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* SendToPerspectiveStepEditorDialog.java
* Copyright (C) 2016 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Instances;
import weka.core.WekaException;
import weka.gui.knowledgeflow.GetPerspectiveNamesGraphicalCommand;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.gui.knowledgeflow.SendToPerspectiveGraphicalCommand;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import java.util.ArrayList;
import java.util.List;
/**
 * Step that can send incoming instances to a perspective. Only operates
 * in a graphical (i.e. non-headless) environment.
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision: $
 */
@KFStep(name = "SendToPerspective", category = "Flow",
  toolTipText = "Send instances to a perspective (graphical environment only)",
  iconPath = KFGUIConsts.BASE_ICON_PATH + "DiamondPlain.gif")
public class SendToPerspective extends BaseStep {

  private static final long serialVersionUID = 7322550048407408819L;

  /** Name of the perspective to send instances to */
  protected String m_perspectiveName = "";

  /**
   * Set the name of the perspective to send data to
   *
   * @param name the name of the target perspective
   */
  public void setPerspectiveName(String name) {
    m_perspectiveName = name;
  }

  /**
   * Get the name of the perspective to send data to
   *
   * @return the name of the target perspective
   */
  public String getPerspectiveName() {
    return m_perspectiveName;
  }

  /**
   * Initialize the step - nothing to do in this case
   *
   * @throws WekaException if a problem occurs
   */
  @Override
  public void stepInit() throws WekaException {
  }

  /**
   * Process an incoming data payload by forwarding its instances to the
   * configured perspective. Logs a warning (rather than failing) when running
   * headless or when no perspective has been configured.
   *
   * @param data the data object to process
   * @throws WekaException if the named perspective is not available
   */
  @Override
  public void processIncoming(Data data) throws WekaException {
    getStepManager().processing();
    if (getStepManager().getExecutionEnvironment().isHeadless()) {
      // perspectives only exist in a graphical environment
      getStepManager().logWarning("Unable to send data to perspective due to "
        + "execution in a headless environment.");
    } else if (m_perspectiveName == null || m_perspectiveName.length() == 0) {
      getStepManager().logWarning("No perspective specified");
    } else {
      List<String> visiblePerspectives = getStepManager()
        .getExecutionEnvironment().getGraphicalEnvironmentCommandHandler()
        .performCommand(
          GetPerspectiveNamesGraphicalCommand.GET_PERSPECTIVE_NAMES_KEY);
      if (!visiblePerspectives.contains(m_perspectiveName)) {
        throw new WekaException("The perspective to send to '"
          + m_perspectiveName + "' does not seem to be available");
      }
      Instances toSend = data.getPrimaryPayload();
      if (toSend != null) {
        getStepManager().getExecutionEnvironment()
          .getGraphicalEnvironmentCommandHandler().performCommand(
            SendToPerspectiveGraphicalCommand.SEND_TO_PERSPECTIVE_COMMAND_KEY,
            m_perspectiveName, toSend);
      }
    }
    getStepManager().finished();
  }

  /**
   * Get a list of acceptable incoming connection types. Dataset, training set
   * and test set connections are accepted while the step has no incoming
   * connection yet.
   *
   * @return a list of acceptable incoming connection types
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    List<String> accepted = new ArrayList<>();
    if (getStepManager().numIncomingConnections() == 0) {
      accepted.add(StepManager.CON_DATASET);
      accepted.add(StepManager.CON_TRAININGSET);
      accepted.add(StepManager.CON_TESTSET);
    }
    return accepted;
  }

  /**
   * Get a list of outgoing connection types - this step produces none
   *
   * @return null, as no outgoing connections are produced
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    return null;
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/SerializedModelSaver.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* SerializedModelSaver.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.classifiers.UpdateableBatchProcessor;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.OptionMetadata;
import weka.core.WekaException;
import weka.gui.FilePropertyMetadata;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.List;
/**
 * Step that can save models encapsulated in incoming {@code Data} objects to
 * the filesystem.
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision: $
 */
@KFStep(name = "SerializedModelSaver", category = "DataSinks",
  toolTipText = "A step that saves models to the file system",
  iconPath = KFGUIConsts.BASE_ICON_PATH + "SerializedModelSaver.gif")
public class SerializedModelSaver extends BaseStep {

  private static final long serialVersionUID = -8343162241983197708L;

  /** Stores the header of data used to build an incremental model */
  protected Instances m_incrementalHeader;

  /**
   * How often to save an incremental classifier (<= 0 means only at the end of
   * the stream)
   */
  protected int m_incrementalSaveSchedule;

  /**
   * Whether to include the relation name of the data in the file name for the
   * model
   */
  protected boolean m_includeRelationName;

  /**
   * The prefix for the file name (model + training set info will be appended)
   */
  private String m_filenamePrefix = "";

  /**
   * The directory to hold the saved model(s)
   */
  private File m_directory = new File(System.getProperty("user.dir"));

  /** Counter for use when processing incremental classifier connections */
  protected int m_counter;

  /**
   * Set the directory to save to
   *
   * @param directory the directory to save to
   */
  @FilePropertyMetadata(fileChooserDialogType = KFGUIConsts.SAVE_DIALOG,
    directoriesOnly = true)
  @OptionMetadata(displayName = "Output directory",
    description = "The directory to save models to", displayOrder = 0)
  public void setOutputDirectory(File directory) {
    m_directory = directory;
  }

  /**
   * Get the directory to save to
   *
   * @return the directory to save to
   */
  public File getOutputDirectory() {
    return m_directory;
  }

  /**
   * Set the text to prepend to the filename
   *
   * @param filenamePrefix the prefix to add to the filename
   */
  @OptionMetadata(displayName = "Filename prefix",
    description = "A prefix to prepend to the filename", displayOrder = 1)
  public void setFilenamePrefix(String filenamePrefix) {
    m_filenamePrefix = filenamePrefix;
  }

  /**
   * Get the text to prepend to the filename
   *
   * @return the prefix to add to the filename
   */
  public String getFilenamePrefix() {
    return m_filenamePrefix;
  }

  /**
   * Set how frequently to save an incremental model
   *
   * @param schedule how often (i.e. every x updates) to save the model. <= 0
   *          indicates that the save will happen just once, at the end of the
   *          stream.
   */
  @OptionMetadata(displayName = "Incremental save schedule",
    description = "How frequently to save incremental classifiers ("
      + "<= 0 indicates that the save will happen just once, at the "
      + "end of the stream", displayOrder = 4)
  public void setIncrementalSaveSchedule(int schedule) {
    m_incrementalSaveSchedule = schedule;
  }

  /**
   * Get how frequently to save an incremental model
   *
   * @return how often (i.e. every x updates) to save the model. <= 0 indicates
   *         that the save will happen just once, at the end of the stream.
   */
  public int getIncrementalSaveSchedule() {
    return m_incrementalSaveSchedule;
  }

  /**
   * Set whether to include the relation name as part of the filename
   *
   * @param includeRelationName true to include the relation name as part of the
   *          filename
   */
  @OptionMetadata(
    displayName = "Include relation name in file name",
    description = "Whether to include the relation name of the data as part of the "
      + "file name", displayOrder = 2)
  public
  void setIncludeRelationNameInFilename(boolean includeRelationName) {
    m_includeRelationName = includeRelationName;
  }

  /**
   * Get whether to include the relation name as part of the filename
   *
   * @return true if the relation name will be included as part of the filename
   */
  public boolean getIncludeRelationNameInFilename() {
    return m_includeRelationName;
  }

  /**
   * Get a list of incoming connection types that this step can accept. This
   * step accepts any number of trained model connections (batch or
   * incremental).
   *
   * @return a list of incoming connections that this step can accept given its
   *         current state
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    List<String> result = new ArrayList<String>();
    result.add(StepManager.CON_BATCH_CLASSIFIER);
    result.add(StepManager.CON_INCREMENTAL_CLASSIFIER);
    result.add(StepManager.CON_BATCH_CLUSTERER);
    result.add(StepManager.CON_BATCH_ASSOCIATOR);
    return result;
  }

  /**
   * Get a list of outgoing connection types that this step can produce. A
   * model saver is a terminal step, so no outgoing connections are produced.
   *
   * @return an empty list of outgoing connections
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    return new ArrayList<String>();
  }

  /**
   * Initialize the step
   */
  @Override
  public void stepInit() {
    m_incrementalHeader = null;
    m_counter = 0;
  }

  /**
   * Process an incoming data payload (if the step accepts incoming connections)
   *
   * @param data the data to process
   * @throws WekaException if a problem occurs
   */
  @Override
  public void processIncoming(Data data) throws WekaException {
    Object modelToSave = null;
    Instances modelHeader = null;
    Integer setNum = null;
    Integer maxSetNum = null;
    if (data.getConnectionName().equals(StepManager.CON_INCREMENTAL_CLASSIFIER)) {
      // grab the structure of the stream once, from the first test instance
      if (m_incrementalHeader == null
        && !getStepManager().isStreamFinished(data)) {
        m_incrementalHeader =
          ((Instance) data
            .getPayloadElement(StepManager.CON_AUX_DATA_TEST_INSTANCE))
            .dataset();
      }
      // save at the end of the stream, or every m_incrementalSaveSchedule
      // updates when a schedule is in effect
      if (getStepManager().isStreamFinished(data)
        || (m_incrementalSaveSchedule > 0
          && m_counter % m_incrementalSaveSchedule == 0 && m_counter > 0)) {
        modelToSave =
          (weka.classifiers.Classifier) data
            .getPayloadElement(StepManager.CON_INCREMENTAL_CLASSIFIER);
        modelHeader = m_incrementalHeader;
      }
    } else {
      // batch connection - model plus (optionally) training/test structure
      modelToSave = data.getPayloadElement(data.getConnectionName());
      modelHeader =
        (Instances) data
          .getPayloadElement(StepManager.CON_AUX_DATA_TRAININGSET);
      setNum =
        (Integer) data.getPayloadElement(StepManager.CON_AUX_DATA_SET_NUM);
      maxSetNum =
        (Integer) data.getPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM);
      if (modelHeader == null) {
        modelHeader =
          (Instances) data.getPayloadElement(StepManager.CON_AUX_DATA_TESTSET);
      }
    }
    if (modelToSave != null) {
      if (modelToSave instanceof UpdateableBatchProcessor) {
        try {
          // make sure model cleans up before saving
          ((UpdateableBatchProcessor) modelToSave).batchFinished();
        } catch (Exception ex) {
          throw new WekaException(ex);
        }
      }
      if (modelHeader != null) {
        // save structure only, not the data itself
        modelHeader = new Instances(modelHeader, 0);
      }
      getStepManager().processing();
      String prefix = getStepManager().environmentSubstitute(m_filenamePrefix);
      String relationName =
        m_includeRelationName && modelHeader != null ? modelHeader
          .relationName() : "";
      String setSpec =
        maxSetNum != null && setNum != null ? "_" + setNum + "_" + maxSetNum
          + "_" : "";
      // strip the package part of the model's class name
      String modelName = modelToSave.getClass().getCanonicalName();
      modelName =
        modelName.substring(modelName.lastIndexOf(".") + 1, modelName.length());
      String filename = "" + prefix + relationName + setSpec + modelName;
      filename = sanitizeFilename(filename);
      String dirName =
        getStepManager().environmentSubstitute(m_directory.toString());
      File tempFile = new File(dirName);
      filename = tempFile.getAbsolutePath() + File.separator + filename;
      getStepManager().logBasic(
        "Saving model " + modelToSave.getClass().getCanonicalName() + " to "
          + filename + ".model");
      getStepManager().statusMessage(
        "Saving model: " + modelToSave.getClass().getCanonicalName());
      ObjectOutputStream oos = null;
      try {
        oos =
          new ObjectOutputStream(new BufferedOutputStream(new FileOutputStream(
            new File(filename + ".model"))));
        oos.writeObject(modelToSave);
        if (modelHeader != null) {
          oos.writeObject(modelHeader);
        }
      } catch (Exception ex) {
        throw new WekaException(ex);
      } finally {
        // FIX: compare connection names with equals() - the original used
        // reference comparison (!=) on Strings, which only works by accident
        // when the interned constant happens to be passed through unchanged
        if (!data.getConnectionName().equals(
          StepManager.CON_INCREMENTAL_CLASSIFIER)
          || getStepManager().isStreamFinished(data)) {
          getStepManager().finished();
        }
        // FIX: close the stream exactly once, here in the finally block (the
        // original also closed it at the end of the try body)
        if (oos != null) {
          try {
            oos.close();
          } catch (Exception ex) {
            throw new WekaException(ex);
          }
        }
      }
    }
    m_counter++;
  }

  /**
   * makes sure that the filename is valid, i.e., replaces slashes, backslashes
   * and colons with underscores ("_").
   *
   * @param filename the filename to cleanse
   * @return the cleansed filename
   */
  protected static String sanitizeFilename(String filename) {
    return filename.replaceAll("\\\\", "_").replaceAll(":", "_")
      .replaceAll("/", "_");
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/SetPropertiesFromEnvironment.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* SetPropertiesFromEnvironment.java
* Copyright (C) 2016 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.EnumHelper;
import weka.core.Environment;
import weka.core.OptionHandler;
import weka.core.SelectedTag;
import weka.core.Tag;
import weka.core.Utils;
import weka.core.WekaException;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.JobEnvironment;
import weka.knowledgeflow.StepManager;
import weka.knowledgeflow.StepManagerImpl;
import java.beans.BeanInfo;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.io.File;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/**
* Step that accesses property values stored in the flow environment and
* attempts to set them on the algorithm-based step that it is connected to.
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(
name = "SetPropertiesFromEnvironment",
category = "Flow",
toolTipText = "Set properties of the connected algorithm-based step (e.g. "
+ "Classifier, Clusterer etc.) using values stored in the flow environment. "
+ "If no property path for a particular setting value is specified, then it is "
+ "assumed that the value provided is scheme name + options in command-line "
+ "form, in which case the underlying scheme of the connected step will be "
+ "constructed and set; otherwise, the property path is used to set a value "
+ "on the existing underlying scheme.",
iconPath = KFGUIConsts.BASE_ICON_PATH + "SetPropertiesFromEnvironment.gif")
public class SetPropertiesFromEnvironment extends BaseStep {
private static final long serialVersionUID = -8316084792512232973L;
/**
* Initialize the step.
*
* @throws WekaException if a problem occurs
*/
@Override
public void stepInit() throws WekaException {
// all the action happens here in the initialization
Environment env =
getStepManager().getExecutionEnvironment().getEnvironmentVariables();
if (env instanceof JobEnvironment
&& getStepManager().numOutgoingConnections() == 1) {
Map<String, List<StepManager>> outgoing =
getStepManager().getOutgoingConnections();
StepManagerImpl connectedManager = null;
for (Map.Entry<String, List<StepManager>> e : outgoing.entrySet()) {
connectedManager = (StepManagerImpl) e.getValue().get(0);
}
if (connectedManager != null) {
Step connectedStep = connectedManager.getManagedStep();
String stepName = connectedStep.getName();
Map<String, String> propertiesToSet =
((JobEnvironment) env).getStepProperties(stepName);
if (propertiesToSet != null && propertiesToSet.size() > 0) {
if (connectedStep instanceof WekaAlgorithmWrapper) {
// only handle algorithm wrappers (just about everything
// else can handle variables
setProperties((WekaAlgorithmWrapper) connectedStep, propertiesToSet);
}
}
}
}
}
/**
* Get a list of acceptable incoming connection types
*
* @return a list of acceptable incoming connection types
*/
@Override
public List<String> getIncomingConnectionTypes() {
return null;
}
@Override
public List<String> getOutgoingConnectionTypes() {
if (getStepManager().numOutgoingConnections() == 0) {
return Arrays.asList(StepManager.CON_INFO);
}
return null;
}
/**
* Set properties on the specified target objet
*
* @param target the target to set properties on
* @param propertiesToSet the properties to try and set
*/
protected void setProperties(WekaAlgorithmWrapper target,
Map<String, String> propertiesToSet) {
for (Map.Entry<String, String> e : propertiesToSet.entrySet()) {
String propName = e.getKey();
String propVal = e.getValue().trim();
if (propVal.length() == 0) {
continue;
}
try {
if (propName.length() == 0) {
// assume the value is scheme + options for specifying the
// wrapped algorithm
String[] schemeAndOpts = Utils.splitOptions(propVal);
if (schemeAndOpts.length > 0) {
String schemeName = schemeAndOpts[0];
schemeAndOpts[0] = "";
Object valToSet = Utils.forName(null, schemeName, schemeAndOpts);
setValue(target, target.getName(), "wrappedAlgorithm", valToSet);
}
} else {
// single property on the wrapped algorithm
String[] propPath = propName.split("\\.");
Object propRoot = target.getWrappedAlgorithm();
String propToSet = propPath[propPath.length - 1];
List<String> remainingPath = new ArrayList<>();
for (int i = 0; i < propPath.length - 1; i++) {
remainingPath.add(propPath[i]);
}
if (remainingPath.size() > 0) {
propRoot = drillToProperty(propRoot, remainingPath);
}
Object valToSet = stringToVal(propVal, propRoot, propToSet);
setValue(propRoot, propRoot.getClass().getCanonicalName(), propToSet,
valToSet);
}
} catch (Exception ex) {
String pN = propName.length() == 0 ? "wrapped algorithm" : propName;
getStepManager().logWarning(
"Unable to set " + pN + " with value: " + propVal + " on step "
+ target.getName() + ". Reason: " + ex.getMessage());
}
}
// re-initialize (just in case KF environment has called initStep() on
// the target WekaAlgorithmWrapper before we get to set its properties
try {
target.stepInit();
} catch (WekaException e) {
getStepManager().logWarning(
"Was unable to re-initialize step '" + target.getName()
+ "' after setting properties");
}
}
/**
* Attempts to convert a property value in string form to the object type that
* the actual property accepts
*
* @param propVal the string representation of the property value
* @param target the target object to receive the property value
* @param propName the name of the property
* @return the property value to set as an object
* @throws WekaException if a problem occurs
*/
protected Object stringToVal(String propVal, Object target, String propName)
throws WekaException {
Object resultVal = null;
try {
PropertyDescriptor prop = getPropDescriptor(target, propName);
if (prop == null) {
throw new WekaException("Unable to find method '" + propName + "'");
}
Method getMethod = prop.getReadMethod();
Object current = getMethod.invoke(target);
if (current.getClass().isArray()) {
resultVal = Utils.forName(null, propVal, null);
} else if (current instanceof SelectedTag) {
Tag[] legalTags = ((SelectedTag) current).getTags();
int tagIndex = Integer.MAX_VALUE;
// first try and parse as an integer
try {
int specifiedID = Integer.parseInt(propVal);
for (int z = 0; z < legalTags.length; z++) {
if (legalTags[z].getID() == specifiedID) {
tagIndex = z;
break;
}
}
} catch (NumberFormatException e) {
// try to match tag strings
for (int z = 0; z < legalTags.length; z++) {
if (legalTags[z].getReadable().equals(propVal.trim())) {
tagIndex = z;
break;
}
}
}
if (tagIndex != Integer.MAX_VALUE) {
resultVal = new SelectedTag(tagIndex, legalTags);
} else {
throw new WekaException(
"Unable to set SelectedTag value for property " + "'" + propName
+ "'");
}
} else if (current instanceof Enum) {
EnumHelper helper = new EnumHelper((Enum) current);
resultVal = EnumHelper.valueFromString(helper.getEnumClass(), propVal);
} else if (current instanceof OptionHandler) {
String[] schemeAndOpts = Utils.splitOptions(propVal);
if (schemeAndOpts.length > 0) {
String schemeName = schemeAndOpts[0];
schemeAndOpts[0] = "";
resultVal = Utils.forName(null, schemeName, schemeAndOpts);
}
} else if (current instanceof Number) {
try {
if (current instanceof Integer) {
resultVal = new Integer(propVal);
} else if (current instanceof Long) {
resultVal = new Long(propVal);
} else if (current instanceof Double) {
resultVal = new Double(propVal);
} else if (current instanceof Float) {
resultVal = new Float(propVal);
}
} catch (NumberFormatException ex) {
throw new WekaException("Unable to parse '" + propVal
+ "' as a number");
}
} else if (current instanceof Boolean) {
resultVal =
propVal.equalsIgnoreCase("true") || propVal.equalsIgnoreCase("yes")
|| propVal.equalsIgnoreCase("Y");
} else if (current instanceof String) {
resultVal = propVal;
} else if (current instanceof File) {
resultVal = new File(propVal);
}
if (resultVal == null) {
throw new WekaException("Was unable to determine the value to set for "
+ "property '" + propName + "'");
}
} catch (Exception ex) {
throw new WekaException(ex);
}
return resultVal;
}
/**
* Sets the value of the property on the target object
*
* @param target the target object
* @param targetName the name of the step owning the target object
* @param propName the property name
* @param valToSet the value to set
* @throws WekaException if a problem occurs
*/
protected void setValue(Object target, String targetName, String propName,
Object valToSet) throws WekaException {
try {
getStepManager().logDebug(
"Attempting to set property '" + propName + "' "
+ "with value of type '" + valToSet.getClass().getCanonicalName()
+ " '(" + valToSet + ") on '" + targetName + "'");
PropertyDescriptor prop = getPropDescriptor(target, propName);
if (prop == null) {
throw new WekaException("Unable to find method '" + propName + "'");
}
Method setMethod = prop.getWriteMethod();
setMethod.invoke(target, valToSet);
} catch (Exception e) {
throw new WekaException(e);
}
}
/**
* Gets the property descriptor for the supplied property name on the
* supplied target object
*
* @param target the target object
* @param propName the property name
* @return the property descriptor, or null if no such property exists
* @throws IntrospectionException if a problem occurs
*/
protected PropertyDescriptor
getPropDescriptor(Object target, String propName)
throws IntrospectionException {
PropertyDescriptor result = null;
BeanInfo bi = Introspector.getBeanInfo(target.getClass());
PropertyDescriptor[] properties = bi.getPropertyDescriptors();
for (PropertyDescriptor p : properties) {
if (p.getName().equals(propName)) {
result = p;
break;
}
}
return result;
}
/**
* Drill down to the last element in the supplied property path list on
* the given base object
*
* @param baseObject the base object to drill down
* @param propertyPath the property path to traverse
* @return the Object corresponding to the path
* @throws WekaException if a problem occurs
*/
protected Object
drillToProperty(Object baseObject, List<String> propertyPath)
throws WekaException {
Object objectBeingConfigured = baseObject;
if (propertyPath != null) {
for (String methodName : propertyPath) {
try {
boolean isArray = methodName.endsWith("]");
int arrayIndex = -1;
if (isArray) {
String arrayPart =
methodName.substring(methodName.indexOf('[') + 1,
methodName.lastIndexOf(']'));
arrayIndex = Integer.parseInt(arrayPart.trim());
methodName = methodName.substring(0, methodName.indexOf('['));
}
BeanInfo bi =
Introspector.getBeanInfo(objectBeingConfigured.getClass());
PropertyDescriptor[] properties = bi.getPropertyDescriptors();
PropertyDescriptor targetProperty = null;
for (PropertyDescriptor p : properties) {
if (p.getName().equals(methodName)) {
targetProperty = p;
break;
}
}
if (targetProperty == null) {
throw new WekaException(
"Unable to find accessor method for property path part: "
+ methodName + " on object "
+ objectBeingConfigured.getClass().getName());
}
Method getMethod = targetProperty.getReadMethod();
Object[] args = {};
objectBeingConfigured = getMethod.invoke(objectBeingConfigured, args);
if (isArray) {
// get the indexed element
if (!objectBeingConfigured.getClass().isArray()) {
throw new WekaException("Property path element '" + methodName
+ "' was specified as an array type, but the "
+ "resulting object retrieved "
+ "from this property is not an array!");
}
objectBeingConfigured =
((Object[]) objectBeingConfigured)[arrayIndex];
}
} catch (IntrospectionException ex) {
throw new WekaException("GOEManager: couldn't introspect", ex);
} catch (InvocationTargetException e) {
throw new WekaException("Invocation target exception when invoking "
+ methodName + " on " + objectBeingConfigured.getClass().getName(),
e);
} catch (IllegalAccessException e) {
throw new WekaException(e);
}
}
}
return objectBeingConfigured;
}
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/SetVariables.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* SetVariables.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Attribute;
import weka.core.Environment;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.WekaException;
import weka.gui.ProgrammaticProperty;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.JobEnvironment;
import weka.knowledgeflow.StepManager;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
* Step that can be used to set the values of environment variables for the flow
* being executed. Can be useful when testing flows that use environment
* variables (that would typically have values set appropriately at runtime in a
 * production setting). This step is special in the sense that the Knowledge Flow
* checks for it and invokes its stepInit() method (thus setting variables)
* before initializing all other steps in the flow. It can also be used to set
* 'dynamic' variables based on the values of attributes in incoming instances.
* Dynamic variables are not guaranteed to be available to other steps in the
* same flow at runtime. Instead, they are meant to be used by a directly
* connected (via 'variables' connection) 'Job' step, which will execute a
* specified sub-flow for each 'variables' data object received.
*
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(
  name = "SetVariables",
  category = "Flow",
  toolTipText = "Assign default values for static variables, if not already set, and "
    + "for dynamic variables. Static variables are guaranteed to be available to "
    + "all other steps at initialization as the Knowledge Flow makes sure that "
    + "SetVariables is invoked first. Dynamic variables can have their "
    + "values set using the values of attributes from incoming instances. Dynamic "
    + "variables are *not* guaranteed to be available to other steps in the flow - "
    + "instead, they are intended for use by a directly connected 'Job' step, which "
    + "will execute a specified sub-flow for each 'variables' data object received.",
  iconPath = KFGUIConsts.BASE_ICON_PATH + "SetVariables.gif")
public class SetVariables extends BaseStep {

  /** Separates individual variable entries in the internal representation */
  public static final String SEP1 = "@@vv@@";

  /** Separates a variable name from its value/default */
  public static final String SEP2 = "@v@v";

  /** Separates an attribute name from its variable spec (dynamic vars) */
  public static final String SEP3 = "@a@a";

  private static final long serialVersionUID = 8042350408846800738L;

  /** Holds static variables in internal representation */
  protected String m_internalRep = "";

  /** Holds dynamic variables in internal representation */
  protected String m_dynamicInternalRep = "";

  /** Map of variables to set with fixed values */
  protected Map<String, String> m_varsToSet =
    new LinkedHashMap<String, String>();

  /**
   * Map of variables to set based on the values of attributes in incoming
   * instances. Keyed by attribute name/index; each value is a two-element list
   * of [variable name, default value].
   */
  protected Map<String, List<String>> m_varsToSetFromIncomingInstances =
    new LinkedHashMap<>();

  /**
   * OK if there is at least one specified attribute in the incoming instance
   * structure
   */
  protected boolean m_structureOK;

  /** True if the structure has been checked */
  protected boolean m_structureCheckComplete;

  /**
   * True if an exception should be raised when an attribute value being used to
   * set a variable is missing, instead of using a default value.
   * NOTE(review): no setter/getter is visible for this field in this chunk, so
   * it appears it can only remain false at runtime -- confirm against the rest
   * of the file before relying on it.
   */
  protected boolean m_raiseErrorWhenValueMissing;

  /**
   * Set the static variables to set (in internal representation)
   *
   * @param rep the variables to set
   */
  @ProgrammaticProperty
  public void setVarsInternalRep(String rep) {
    m_internalRep = rep;
  }

  /**
   * Get the variables to set (in internal representation)
   *
   * @return the variables to set
   */
  public String getVarsInternalRep() {
    return m_internalRep;
  }

  /**
   * Set the dynamic variables to set (in internal representation)
   *
   * @param rep the dynamic variables to set
   */
  @ProgrammaticProperty
  public void setDynamicVarsInternalRep(String rep) {
    m_dynamicInternalRep = rep;
  }

  /**
   * Get the dynamic variables to set (in internal representation)
   *
   * @return the dynamic variables to set
   */
  public String getDynamicVarsInternalRep() {
    return m_dynamicInternalRep;
  }

  /**
   * Initialize the step. Parses both variable maps from their internal string
   * representations and installs any static variables that are not already
   * defined in the flow's (Job)Environment.
   *
   * @throws WekaException if a problem occurs during initialization
   */
  @Override
  public void stepInit() throws WekaException {
    m_structureCheckComplete = false;
    m_structureOK = false;
    m_varsToSet = internalToMap(m_internalRep);
    m_varsToSetFromIncomingInstances =
      internalDynamicToMap(m_dynamicInternalRep);

    Environment currentEnv =
      getStepManager().getExecutionEnvironment().getEnvironmentVariables();
    if (currentEnv == null) {
      throw new WekaException(
        "The execution environment doesn't seem to have any support for variables");
    }
    // ensure we have a JobEnvironment so variables can be propagated
    if (!(currentEnv instanceof JobEnvironment)) {
      currentEnv = new JobEnvironment(currentEnv);
      getStepManager().getExecutionEnvironment().setEnvironmentVariables(
        currentEnv);
    }

    // static variables only supply defaults: never overwrite an existing value
    for (Map.Entry<String, String> e : m_varsToSet.entrySet()) {
      String key = e.getKey();
      String value = e.getValue();
      if (key != null && key.length() > 0 && value != null
        && currentEnv.getVariableValue(key) == null) {
        getStepManager()
          .logDetailed("Setting variable: " + key + " = " + value);
        currentEnv.addVariable(key, value);
      }
    }

    if (getStepManager().numIncomingConnections() > 0
      && m_varsToSetFromIncomingInstances.size() == 0) {
      getStepManager().logWarning(
        "Incoming data detected, but no variables to set from incoming "
          + "instances have been defined");
    }
  }

  /**
   * Process incoming data: sets dynamic variables from attribute values and
   * forwards an environment data object per instance.
   *
   * @param data the data to process
   * @throws WekaException if a problem occurs
   */
  @Override
  public void processIncoming(Data data) throws WekaException {
    if (!m_structureCheckComplete) {
      m_structureCheckComplete = true;
      Instances structure = null;
      if (data.getConnectionName().equals(StepManager.CON_INSTANCE)) {
        structure = ((Instance) data.getPrimaryPayload()).dataset();
      } else if (data.getConnectionName().equals(StepManager.CON_ENVIRONMENT)) {
        structure =
          ((Instance) data.getPayloadElement(StepManager.CON_AUX_DATA_INSTANCE))
            .dataset();
      } else {
        structure = data.getPrimaryPayload();
      }
      checkStructure(structure);
    }

    getStepManager().processing();
    if (data.getConnectionName().equals(StepManager.CON_INSTANCE)
      || data.getConnectionName().equals(StepManager.CON_ENVIRONMENT)) {
      // streaming case: one instance at a time
      if (isStopRequested()) {
        getStepManager().interrupted();
        return;
      }

      if (getStepManager().isStreamFinished(data)) {
        // relay end-of-stream, preserving any upstream environment payload
        Data finished = new Data(StepManager.CON_ENVIRONMENT);
        if (data.getConnectionName().equals(StepManager.CON_ENVIRONMENT)) {
          finished
            .setPayloadElement(
              StepManager.CON_AUX_DATA_ENVIRONMENT_VARIABLES,
              data
                .getPayloadElement(StepManager.CON_AUX_DATA_ENVIRONMENT_VARIABLES));
          finished
            .setPayloadElement(
              StepManager.CON_AUX_DATA_ENVIRONMENT_PROPERTIES,
              data
                .getPayloadElement(StepManager.CON_AUX_DATA_ENVIRONMENT_PROPERTIES));
        }
        getStepManager().throughputFinished(finished);
        return;
      }
      Instance toProcess =
        (Instance) (data.getConnectionName().equals(StepManager.CON_INSTANCE) ? data
          .getPrimaryPayload() : data
          .getPayloadElement(StepManager.CON_AUX_DATA_INSTANCE));
      getStepManager().throughputUpdateStart();
      processInstance(toProcess,
        data.getConnectionName().equals(StepManager.CON_ENVIRONMENT) ? data
          : null);
      getStepManager().throughputUpdateEnd();
    } else {
      // batch case: iterate over the dataset
      Instances insts = data.getPrimaryPayload();
      for (int i = 0; i < insts.numInstances(); i++) {
        if (isStopRequested()) {
          break;
        }
        processInstance(insts.instance(i), null);
        // NOTE(review): throughputFinished() is signalled once per instance
        // here; it looks as though it was intended to fire once after the
        // loop -- confirm against the framework's stream contract before
        // changing.
        Data finished = new Data(StepManager.CON_ENVIRONMENT);
        getStepManager().throughputFinished(finished);
      }
      if (isStopRequested()) {
        getStepManager().interrupted();
      }
    }
  }

  /**
   * Build the dynamic variable map for one instance and emit an environment
   * data object carrying it.
   *
   * @param inst the instance to read attribute values from
   * @param existingEnv an upstream environment Data object to merge variables
   *          and properties from (may be null)
   * @throws WekaException if a required attribute value is missing and
   *           m_raiseErrorWhenValueMissing is true
   */
  protected void processInstance(Instance inst, Data existingEnv)
    throws WekaException {
    Map<String, String> vars = new HashMap<>();
    for (Map.Entry<String, List<String>> e : m_varsToSetFromIncomingInstances
      .entrySet()) {
      String attName = environmentSubstitute(e.getKey());
      Attribute current = inst.dataset().attribute(attName);
      int index = -1;
      if (current != null) {
        index = current.index();
      } else {
        // fall back to interpreting the key as a 1-based attribute index
        try {
          index = Integer.parseInt(attName);
          index--; // make zero-based
        } catch (NumberFormatException ex) {
          // ignore - attribute simply not present
        }
      }

      if (index != -1) {
        String varToSet = environmentSubstitute(e.getValue().get(0));
        String val = environmentSubstitute(e.getValue().get(1));
        if (inst.isMissing(index)) {
          if (m_raiseErrorWhenValueMissing) {
            throw new WekaException("Value of attribute '"
              + inst.attribute(index).name()
              + "' was missing in current instance");
          }
        } else {
          val = inst.stringValue(index);
        }
        vars.put(varToSet, val);
      }
    }

    Environment env =
      getStepManager().getExecutionEnvironment().getEnvironmentVariables();
    for (Map.Entry<String, String> e : vars.entrySet()) {
      env.addVariable(e.getKey(), e.getValue());
    }

    // upstream variables take precedence over ones computed here
    if (existingEnv != null) {
      Map<String, String> existingVars =
        existingEnv
          .getPayloadElement(StepManager.CON_AUX_DATA_ENVIRONMENT_VARIABLES);
      if (existingVars != null) {
        vars.putAll(existingVars);
      }
    }
    Data output = new Data(StepManager.CON_ENVIRONMENT);
    output.setPayloadElement(StepManager.CON_AUX_DATA_ENVIRONMENT_VARIABLES,
      vars);
    if (existingEnv != null) {
      output.setPayloadElement(StepManager.CON_AUX_DATA_ENVIRONMENT_PROPERTIES,
        existingEnv
          .getPayloadElement(StepManager.CON_AUX_DATA_ENVIRONMENT_PROPERTIES));
    }

    // make sure that each data output is in the same thread
    output.setPayloadElement(StepManager.CON_AUX_DATA_INSTANCE, inst);
    output.setPayloadElement(StepManager.CON_AUX_DATA_IS_INCREMENTAL, true);
    getStepManager().outputData(output);
  }

  /**
   * Check the incoming instance structure against the configured dynamic
   * variable attributes, warning about any that are absent.
   *
   * @param structure the incoming instances structure
   */
  protected void checkStructure(Instances structure) {
    List<String> notFoundInIncoming = new ArrayList<>();
    for (String attName : m_varsToSetFromIncomingInstances.keySet()) {
      if (structure.attribute(attName) == null) {
        notFoundInIncoming.add(attName);
      } else {
        m_structureOK = true;
      }
    }
    if (notFoundInIncoming.size() == m_varsToSetFromIncomingInstances.size()) {
      getStepManager().logWarning(
        "None of the specified attributes appear to be "
          + "in the incoming instance structure");
      return;
    }
    for (String s : notFoundInIncoming) {
      getStepManager().logWarning(
        "Attribute '" + s + "' was not found in the "
          + "incoming instance structure");
    }
  }

  /**
   * Get a list of incoming connection types that this step can accept. Ideally
   * (and if appropriate), this should take into account the state of the step
   * and any existing incoming connections. E.g. a step might be able to accept
   * one (and only one) incoming batch data connection.
   *
   * @return a list of incoming connections that this step can accept given its
   *         current state
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    // at most one incoming connection of any supported type
    if (getStepManager().numIncomingConnections() == 0) {
      return Arrays.asList(StepManager.CON_DATASET,
        StepManager.CON_TRAININGSET, StepManager.CON_TESTSET,
        StepManager.CON_INSTANCE, StepManager.CON_ENVIRONMENT);
    }
    return new ArrayList<String>();
  }

  /**
   * Get a list of outgoing connection types that this step can produce. Ideally
   * (and if appropriate), this should take into account the state of the step
   * and the incoming connections. E.g. depending on what incoming connection is
   * present, a step might be able to produce a trainingSet output, a testSet
   * output or neither, but not both.
   *
   * @return a list of outgoing connections that this step can produce
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    // environment output only makes sense when data is flowing in
    if (getStepManager().numIncomingConnections() != 0) {
      return Arrays.asList(StepManager.CON_ENVIRONMENT);
    }
    return new ArrayList<String>();
  }

  /**
   * Get the output structure for a given connection type.
   *
   * @param connectionName the name of the connection type to get the output
   *          structure for
   * @return the output structure (same as the incoming structure), or null if
   *         not applicable
   * @throws WekaException if a problem occurs
   */
  @Override
  public Instances outputStructureForConnectionType(String connectionName)
    throws WekaException {
    if (getStepManager().numIncomingConnections() == 0
      || (!connectionName.equals(StepManager.CON_DATASET)
        && !connectionName.equals(StepManager.CON_TRAININGSET) && !connectionName
          .equals(StepManager.CON_TESTSET))
        && !connectionName.equals(StepManager.CON_INSTANCE)
        && !connectionName.equals(StepManager.CON_ENVIRONMENT)) {
      return null;
    }

    // our output structure is the same as whatever kind of input we are getting
    return getStepManager().getIncomingStructureForConnectionType(
      connectionName);
  }

  /**
   * Return the fully qualified name of a custom editor component (JComponent)
   * to use for editing the properties of the step. This method can return null,
   * in which case the system will dynamically generate an editor using the
   * GenericObjectEditor
   *
   * @return the fully qualified name of a step editor component
   */
  @Override
  public String getCustomEditorForStep() {
    return "weka.gui.knowledgeflow.steps.SetVariablesStepEditorDialog";
  }

  /**
   * Convert a string in the internal dynamic variable representation to a map
   * keyed by attribute name, where each value is a two-element list of
   * [variable name, default value].
   *
   * @param internalRep the dynamic variables in internal representation
   * @return a map of attribute name to variable name + default value
   */
  public static Map<String, List<String>> internalDynamicToMap(
    String internalRep) {
    Map<String, List<String>> varsToSet = new LinkedHashMap<>();
    if (internalRep != null && internalRep.length() > 0) {
      String[] parts = internalRep.split(SEP1);
      for (String p : parts) {
        String[] attVal = p.split(SEP3);
        if (attVal.length == 2) {
          String attName = attVal[0].trim();
          String[] varDefault = attVal[1].trim().split(SEP2);
          String varName = varDefault[0].trim();
          String defaultV = "";
          if (varDefault.length == 2) {
            defaultV = varDefault[1].trim();
          }
          List<String> varAndDefL = new ArrayList<>();
          varAndDefL.add(varName);
          varAndDefL.add(defaultV);
          varsToSet.put(attName, varAndDefL);
        }
      }
    }
    return varsToSet;
  }

  /**
   * Convert a string in the internal static variable representation to a map of
   * variables + values
   *
   * @param internalRep the variables in internal represenation
   * @return a map of variables + values
   */
  public static Map<String, String> internalToMap(String internalRep) {
    Map<String, String> varsToSet = new LinkedHashMap<String, String>();
    if (internalRep != null && internalRep.length() > 0) {
      String[] parts = internalRep.split(SEP1);
      for (String p : parts) {
        String[] keyVal = p.trim().split(SEP2);
        if (keyVal.length == 2) {
          varsToSet.put(keyVal[0].trim(), keyVal[1]);
        }
      }
    }
    return varsToSet;
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/Sorter.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* Sorter.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import weka.core.Attribute;
import weka.core.Environment;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.OptionMetadata;
import weka.core.WekaException;
import weka.gui.FilePropertyMetadata;
import weka.gui.ProgrammaticProperty;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
/**
* Step for sorting instances according to one or more attributes.
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(name = "Sorter", category = "Tools",
toolTipText = "Sort instances in ascending or descending order according "
+ "to the values of user-specified attributes. Instances can be sorted "
+ "according to multiple attributes (defined in order). Handles datasets "
+ "larger than can be fit into main memory via instance connections and "
+ "specifying the in-memory buffer size. Implements a merge-sort by writing "
+ "the sorted in-memory buffer to a file when full and then interleaving "
+ "instances from the disk-based file(s) when the incoming stream has "
+ "finished.",
iconPath = KFGUIConsts.BASE_ICON_PATH + "Sorter.gif")
public class Sorter extends BaseStep {
private static final long serialVersionUID = 3373283983192467264L;
/** Comparator that applies the sort rules */
protected transient SortComparator m_sortComparator;
/** In memory buffer for incremental operation */
protected transient List<InstanceHolder> m_incrementalBuffer;
/** List of sorted temp files for incremental operation */
protected transient List<File> m_bufferFiles;
/** Size of the in-memory buffer */
protected String m_bufferSize = "10000";
/** Size of the in-memory buffer after resolving any environment vars */
protected int m_bufferSizeI = 10000;
/** Holds indexes of string attributes, keyed by attribute name */
protected Map<String, Integer> m_stringAttIndexes;
/** Holds the internal textual description of the sort definitions */
protected String m_sortDetails;
/**
* The directory to hold the temp files - if not set the system tmp directory
* is used
*/
protected File m_tempDirectory = new File("");
/** format of instances for current incoming connection (if any) */
protected Instances m_connectedFormat;
/** True if we've been reset */
protected boolean m_isReset;
/** True if processing streaming data */
protected boolean m_streaming;
/** To (re)use when streaming */
protected Data m_streamingData;
  /**
   * Get the size of the in-memory buffer (as a string; may contain an
   * environment variable reference that is resolved at init time)
   *
   * @return the size of the in-memory buffer
   */
  public String getBufferSize() {
    return m_bufferSize;
  }
  /**
   * Set the size of the in-memory buffer. Only used for streaming (instance
   * connection) operation; when the buffer fills it is sorted and spilled to a
   * temp file.
   *
   * @param buffSize the size of the in-memory buffer
   */
  @OptionMetadata(displayName = "Size of in-mem streaming buffer",
    description = "Number of instances to sort in memory before writing to a "
      + "temp file (instance connections only)", displayOrder = 1)
  public void setBufferSize(String buffSize) {
    m_bufferSize = buffSize;
  }
  /**
   * Set the directory to use for temporary files during incremental operation.
   * If left unset (empty path), the system temp directory is used.
   *
   * @param tempDir the temp dir to use
   */
  @FilePropertyMetadata(fileChooserDialogType = KFGUIConsts.OPEN_DIALOG,
    directoriesOnly = true)
  @OptionMetadata(displayName = "Directory for temp files",
    description = "Where to store temporary files when spilling to disk",
    displayOrder = 2)
  public void setTempDirectory(File tempDir) {
    m_tempDirectory = tempDir;
  }
  /**
   * Get the directory to use for temporary files during incremental operation
   *
   * @return the temp dir to use (empty path means the system temp directory)
   */
  public File getTempDirectory() {
    return m_tempDirectory;
  }
  /**
   * Set the sort rules to use
   *
   * @param sortDetails the sort rules in internal string representation
   *          (individual rules separated by "@@sort-rule@@")
   */
  @ProgrammaticProperty
  public void setSortDetails(String sortDetails) {
    m_sortDetails = sortDetails;
  }
  /**
   * Get the sort rules to use
   *
   * @return the sort rules in internal string representation
   */
  public String getSortDetails() {
    return m_sortDetails;
  }
  /**
   * Initialize the step. Resets all transient streaming state; the sort rules
   * themselves are parsed lazily in init() once the incoming structure is
   * known.
   *
   * @throws WekaException if a problem occurs during initialization
   */
  @Override
  public void stepInit() throws WekaException {
    m_isReset = true;
    m_streaming = false;
    m_stringAttIndexes = new HashMap<String, Integer>();
    m_bufferFiles = new ArrayList<File>();
    // reusable data object for streaming output
    m_streamingData = new Data(StepManager.CON_INSTANCE);
  }
/**
* Get a list of incoming connection types that this step can accept. Ideally
* (and if appropriate), this should take into account the state of the step
* and any existing incoming connections. E.g. a step might be able to accept
* one (and only one) incoming batch data connection.
*
* @return a list of incoming connections that this step can accept given its
* current state
*/
@Override
public List<String> getIncomingConnectionTypes() {
if (getStepManager().numIncomingConnections() == 0) {
return Arrays.asList(StepManager.CON_INSTANCE, StepManager.CON_DATASET,
StepManager.CON_TRAININGSET, StepManager.CON_TESTSET);
}
return null;
}
/**
* Get a list of outgoing connection types that this step can produce. Ideally
* (and if appropriate), this should take into account the state of the step
* and the incoming connections. E.g. depending on what incoming connection is
* present, a step might be able to produce a trainingSet output, a testSet
* output or neither, but not both.
*
* @return a list of outgoing connections that this step can produce
*/
@Override
public List<String> getOutgoingConnectionTypes() {
List<String> result = new ArrayList<String>();
if (getStepManager().numIncomingConnectionsOfType(StepManager.CON_INSTANCE) > 0) {
result.add(StepManager.CON_INSTANCE);
}
if (getStepManager().numIncomingConnectionsOfType(StepManager.CON_DATASET) > 0) {
result.add(StepManager.CON_DATASET);
}
if (getStepManager().numIncomingConnectionsOfType(
StepManager.CON_TRAININGSET) > 0) {
result.add(StepManager.CON_TRAININGSET);
}
if (getStepManager().numIncomingConnectionsOfType(StepManager.CON_TESTSET) > 0) {
result.add(StepManager.CON_TESTSET);
}
return result;
}
/**
* Initialize given the supplied instances structure
*
* @param structure the structure to initialize with
*/
protected void init(Instances structure) {
m_connectedFormat = structure;
List<SortRule> sortRules = new ArrayList<SortRule>();
if (m_sortDetails != null && m_sortDetails.length() > 0) {
String[] sortParts = m_sortDetails.split("@@sort-rule@@");
for (String s : sortParts) {
SortRule r = new SortRule(s.trim());
r.init(getStepManager().getExecutionEnvironment()
.getEnvironmentVariables(), structure);
sortRules.add(r);
}
m_sortComparator = new SortComparator(sortRules);
}
// check for string attributes
m_stringAttIndexes = new HashMap<String, Integer>();
for (int i = 0; i < structure.numAttributes(); i++) {
if (structure.attribute(i).isString()) {
m_stringAttIndexes.put(structure.attribute(i).name(), new Integer(i));
}
}
if (m_stringAttIndexes.size() == 0) {
m_stringAttIndexes = null;
}
if (m_streaming) {
String buffSize = environmentSubstitute(m_bufferSize);
m_bufferSizeI = Integer.parseInt(buffSize);
m_incrementalBuffer = new ArrayList<InstanceHolder>(m_bufferSizeI);
}
}
/**
* Process an incoming data payload (if the step accepts incoming connections)
*
* @param data the data to process
* @throws WekaException if a problem occurs
*/
@Override
public void processIncoming(Data data) throws WekaException {
if (m_isReset) {
Instances structure;
if (data.getConnectionName().equals(StepManager.CON_INSTANCE)) {
Instance inst = data.getPrimaryPayload();
structure = new Instances(inst.dataset(), 0);
m_streaming = true;
getStepManager().logBasic(
"Starting streaming sort. Using streaming " + "buffer size: "
+ m_bufferSizeI);
m_isReset = false;
} else {
structure = data.getPrimaryPayload();
structure = new Instances(structure, 0);
}
init(structure);
}
if (m_streaming) {
processIncremental(data);
} else {
processBatch(data);
}
if (isStopRequested()) {
getStepManager().interrupted();
} else if (!m_streaming) {
getStepManager().finished();
}
}
/**
* Process batch data
*
* @param data the data to process
* @throws WekaException if a problem occurs
*/
protected void processBatch(Data data) throws WekaException {
getStepManager().processing();
Instances insts = data.getPrimaryPayload();
getStepManager().logBasic("Sorting " + insts.relationName());
List<InstanceHolder> instances = new ArrayList<InstanceHolder>();
for (int i = 0; i < insts.numInstances(); i++) {
InstanceHolder h = new InstanceHolder();
h.m_instance = insts.instance(i);
instances.add(h);
}
Collections.sort(instances, m_sortComparator);
Instances output = new Instances(insts, 0);
for (int i = 0; i < instances.size(); i++) {
output.add(instances.get(i).m_instance);
}
Data outputD = new Data(data.getConnectionName(), output);
outputD.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM,
data.getPayloadElement(StepManager.CON_AUX_DATA_SET_NUM));
outputD.setPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM,
data.getPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM));
getStepManager().outputData(outputD);
}
  /**
   * Process one streamed instance: buffer it (copying string attribute values
   * so they survive serialization), and when the buffer reaches the configured
   * size, sort it and spill it to a temp file. On end-of-stream, emits all
   * buffered/spilled instances in sorted order.
   *
   * @param data the data to process
   * @throws WekaException if a problem occurs
   */
  protected void processIncremental(Data data) throws WekaException {
    if (isStopRequested()) {
      return;
    }
    if (getStepManager().isStreamFinished(data)) {
      // incoming stream done - merge and emit everything we've buffered
      emitBufferedInstances();
    } else {
      getStepManager().throughputUpdateStart();
      InstanceHolder tempH = new InstanceHolder();
      tempH.m_instance = data.getPrimaryPayload();
      tempH.m_fileNumber = -1; // unused here
      if (m_stringAttIndexes != null) {
        copyStringAttVals(tempH);
      }
      m_incrementalBuffer.add(tempH);

      if (m_incrementalBuffer.size() == m_bufferSizeI) {
        // time to sort and write this to a temp file
        try {
          sortBuffer(true);
        } catch (Exception ex) {
          throw new WekaException(ex);
        }
      }
      getStepManager().throughputUpdateEnd();
    }
  }
  /**
   * Emit all buffered instances in sorted order. If no temp files were
   * written, the in-memory buffer is sorted and streamed out directly.
   * Otherwise performs the interleave phase of a merge sort: one instance is
   * held from each sorted temp file (plus the in-memory buffer), the smallest
   * is emitted, and its source supplies the replacement.
   *
   * @throws WekaException if a problem occurs
   */
  protected void emitBufferedInstances() throws WekaException {
    if (isStopRequested()) {
      return;
    }
    if (m_incrementalBuffer.size() > 0) {
      // sort whatever remains in memory (no spill to disk)
      try {
        getStepManager().throughputUpdateStart();
        sortBuffer(false);
        getStepManager().throughputUpdateEnd();
      } catch (Exception ex) {
        throw new WekaException(ex);
      }
      if (m_bufferFiles.size() == 0) {
        // we only have the in memory buffer - no merge needed
        getStepManager().logDetailed("Emitting in memory buffer");
        Instances newHeader =
          new Instances(m_incrementalBuffer.get(0).m_instance.dataset(), 0);
        for (int i = 0; i < m_incrementalBuffer.size(); i++) {
          getStepManager().throughputUpdateStart();
          InstanceHolder currentH = m_incrementalBuffer.get(i);
          currentH.m_instance.setDataset(newHeader);
          if (m_stringAttIndexes != null) {
            // restore the saved string values into the (single-value) header
            for (String attName : m_stringAttIndexes.keySet()) {
              boolean setValToZero =
                newHeader.attribute(attName).numValues() > 0;
              newHeader.attribute(attName).setStringValue(
                currentH.m_stringVals.get(attName));
              if (setValToZero) {
                currentH.m_instance.setValue(newHeader.attribute(attName), 0);
              }
            }
          }
          if (isStopRequested()) {
            return;
          }
          m_streamingData.setPayloadElement(StepManager.CON_INSTANCE,
            currentH.m_instance);
          getStepManager().throughputUpdateEnd();
          getStepManager().outputData(m_streamingData);
          if (i == m_incrementalBuffer.size() - 1) {
            // end of stream
            m_streamingData.clearPayload();
            getStepManager().throughputFinished(m_streamingData);
          }
        }
        return;
      }
    }
    List<ObjectInputStream> inputStreams = new ArrayList<ObjectInputStream>();
    // for the interleaving part of the merge sort
    List<InstanceHolder> merger = new ArrayList<InstanceHolder>();

    Instances tempHeader = new Instances(m_connectedFormat, 0);

    // add an instance from the in-memory buffer first
    if (m_incrementalBuffer.size() > 0) {
      InstanceHolder tempH = m_incrementalBuffer.remove(0);
      merger.add(tempH);
    }
    if (isStopRequested()) {
      return;
    }
    if (m_bufferFiles.size() > 0) {
      getStepManager().logDetailed("Merging temp files");
    }
    // open all temp buffer files and read one instance from each
    for (int i = 0; i < m_bufferFiles.size(); i++) {
      ObjectInputStream ois = null;
      try {
        FileInputStream fis = new FileInputStream(m_bufferFiles.get(i));
        BufferedInputStream bis = new BufferedInputStream(fis, 50000);
        ois = new ObjectInputStream(bis);
        InstanceHolder tempH = (InstanceHolder) ois.readObject();
        if (tempH != null) {
          inputStreams.add(ois);
          // m_fileNumber records which stream supplies the replacement
          tempH.m_fileNumber = i;
          merger.add(tempH);
        } else {
          // no instances?!??
          ois.close();
        }
      } catch (Exception ex) {
        if (ois != null) {
          try {
            ois.close();
          } catch (Exception e) {
            throw new WekaException(e);
          }
        }
        throw new WekaException(ex);
      }
    }
    Collections.sort(merger, m_sortComparator);
    int mergeCount = 0;
    do {
      if (isStopRequested()) {
        return;
      }
      // emit the current smallest, then refill from its source
      InstanceHolder holder = merger.remove(0);
      holder.m_instance.setDataset(tempHeader);
      if (m_stringAttIndexes != null) {
        for (String attName : m_stringAttIndexes.keySet()) {
          boolean setValToZero =
            (tempHeader.attribute(attName).numValues() > 1);
          tempHeader.attribute(attName).setStringValue(
            holder.m_stringVals.get(attName));
          if (setValToZero) {
            holder.m_instance.setValue(tempHeader.attribute(attName), 0);
          }
        }
      }
      m_streamingData.setPayloadElement(StepManager.CON_INSTANCE,
        holder.m_instance);
      mergeCount++;
      getStepManager().outputData(m_streamingData);
      getStepManager().throughputUpdateStart();
      if (mergeCount % m_bufferSizeI == 0) {
        getStepManager().logDetailed("Merged " + mergeCount + " instances");
      }
      int smallest = holder.m_fileNumber;
      // now get another instance from the source of "smallest"
      InstanceHolder nextH = null;
      if (smallest == -1) {
        // -1 means the in-memory buffer was the source
        if (m_incrementalBuffer.size() > 0) {
          nextH = m_incrementalBuffer.remove(0);
          nextH.m_fileNumber = -1;
        }
      } else {
        ObjectInputStream tis = inputStreams.get(smallest);

        try {
          InstanceHolder tempH = (InstanceHolder) tis.readObject();
          if (tempH != null) {
            nextH = tempH;
            nextH.m_fileNumber = smallest;
          } else {
            throw new Exception("end of buffer");
          }
        } catch (Exception ex) {
          // EOF - this temp file is exhausted; close and drop it
          try {
            getStepManager().logDetailed("Closing temp file");
            tis.close();
          } catch (Exception e) {
            throw new WekaException(ex);
          }
          File file = m_bufferFiles.remove(smallest);
          // file.delete();
          inputStreams.remove(smallest);

          // update file numbers: indexes above the removed stream shift down
          for (InstanceHolder h : merger) {
            if (h.m_fileNumber != -1 && h.m_fileNumber > smallest) {
              h.m_fileNumber--;
            }
          }
        }
      }
      if (nextH != null) {
        // find the correct position (i.e. interleave) for this new Instance
        int index = Collections.binarySearch(merger, nextH, m_sortComparator);

        if (index < 0) {
          merger.add(index * -1 - 1, nextH);
        } else {
          merger.add(index, nextH);
        }
        nextH = null;
      }
      getStepManager().throughputUpdateEnd();
    } while (merger.size() > 0 && !isStopRequested());

    if (!isStopRequested()) {
      // signal end of stream
      m_streamingData.clearPayload();
      getStepManager().throughputFinished(m_streamingData);
    } else {
      // try and close any input streams still open...
      for (ObjectInputStream is : inputStreams) {
        try {
          is.close();
        } catch (Exception ex) {
          // ignore - best effort cleanup on stop
        }
      }
    }
  }
/**
* Sort the buffer
*
* @param write true if the buffer sould be written to a tmp file
* @throws Exception if a problem occurs
*/
private void sortBuffer(boolean write) throws Exception {
getStepManager().logBasic("Sorting in memory buffer");
Collections.sort(m_incrementalBuffer, m_sortComparator);
if (!write) {
return;
}
if (isStopRequested()) {
return;
}
String tmpDir = m_tempDirectory.toString();
File tempFile = File.createTempFile("Sorter", ".tmp");
if (tmpDir != null && tmpDir.length() > 0) {
tmpDir = environmentSubstitute(tmpDir);
File tempDir = new File(tmpDir);
if (tempDir.exists() && tempDir.canWrite()) {
String filename = tempFile.getName();
tempFile = new File(tmpDir + File.separator + filename);
tempFile.deleteOnExit();
}
}
getStepManager().logDebug("Temp file: " + tempFile.toString());
m_bufferFiles.add(tempFile);
FileOutputStream fos = new FileOutputStream(tempFile);
BufferedOutputStream bos = new BufferedOutputStream(fos, 50000);
ObjectOutputStream oos = new ObjectOutputStream(bos);
getStepManager().logDetailed(
"Writing buffer to temp file " + m_bufferFiles.size()
+ ". Buffer contains " + m_incrementalBuffer.size() + " instances");
for (int i = 0; i < m_incrementalBuffer.size(); i++) {
InstanceHolder temp = m_incrementalBuffer.get(i);
temp.m_instance.setDataset(null);
oos.writeObject(temp);
if (i % (m_bufferSizeI / 10) == 0) {
oos.reset();
}
}
bos.flush();
oos.close();
m_incrementalBuffer.clear();
}
private void copyStringAttVals(InstanceHolder holder) {
for (String attName : m_stringAttIndexes.keySet()) {
Attribute att = holder.m_instance.dataset().attribute(attName);
String val = holder.m_instance.stringValue(att);
if (holder.m_stringVals == null) {
holder.m_stringVals = new HashMap<String, String>();
}
holder.m_stringVals.put(attName, val);
}
}
  /**
   * Inner class that holds an instance, the index of the temp file that holds
   * it (when operating in incremental mode), and any string attribute values
   * it carries.
   */
  protected static class InstanceHolder implements Serializable {

    /** For serialization */
    private static final long serialVersionUID = -3985730394250172995L;

    /** The instance */
    protected Instance m_instance;

    /** index into the list of files on disk; -1 means the in-memory buffer */
    protected int m_fileNumber;

    /**
     * for incremental operation, if string attributes are present then we need
     * to store them with each instance - since incremental streaming in the
     * knowledge flow only maintains one string value in memory (and hence in
     * the header) at any one time
     */
    protected Map<String, String> m_stringVals;
  }
/**
 * Comparator that orders {@code InstanceHolder}s by applying a list of
 * {@code SortRule}s in sequence.
 */
protected static class SortComparator implements Comparator<InstanceHolder> {

  /** The ordered list of rules to apply */
  protected List<SortRule> m_sortRules;

  /**
   * Constructor
   *
   * @param sortRules the rules to apply, in order of precedence
   */
  public SortComparator(List<SortRule> sortRules) {
    m_sortRules = sortRules;
  }

  /**
   * Compares two holders by delegating to each rule in turn; the first rule
   * that reports a difference decides the ordering.
   *
   * @param o1 the first {@code InstanceHolder} to compare
   * @param o2 the second {@code InstanceHolder} to compare
   * @return the first non-zero rule comparison, or zero if all rules tie
   */
  @Override
  public int compare(InstanceHolder o1, InstanceHolder o2) {
    for (SortRule rule : m_sortRules) {
      int result = rule.compare(o1, o2);
      if (result != 0) {
        return result;
      }
    }
    return 0;
  }
}
/**
 * A sorting rule based on a single attribute. The attribute may be specified
 * by name, by a numeric index, or by the special values "/first" and "/last".
 */
public static class SortRule implements Comparator<InstanceHolder> {

  /** Name or index of the attribute to compare on */
  protected String m_attributeNameOrIndex;

  /** The resolved attribute to compare on (populated by init()) */
  protected Attribute m_attribute;

  /** True for descending instead of ascending order */
  protected boolean m_descending;

  /**
   * Constructor
   *
   * @param att the name or index of the attribute to compare on
   * @param descending true if order should be descending
   */
  public SortRule(String att, boolean descending) {
    m_attributeNameOrIndex = att;
    m_descending = descending;
  }

  /**
   * No-argument constructor
   */
  public SortRule() {
  }

  /**
   * Constructor that parses a rule definition in internal format
   *
   * @param setup the definition of a sort rule
   */
  public SortRule(String setup) {
    parseFromInternal(setup);
  }

  /**
   * Populates this rule from its internal string format: the attribute
   * name/index and a Y/N descending flag, separated by "@@SR@@".
   *
   * @param setup the rule in internal format
   * @throws IllegalArgumentException if the definition is malformed
   */
  protected void parseFromInternal(String setup) {
    String[] fields = setup.split("@@SR@@");
    if (fields.length != 2) {
      throw new IllegalArgumentException("Malformed sort rule: " + setup);
    }
    m_attributeNameOrIndex = fields[0].trim();
    m_descending = fields[1].equalsIgnoreCase("Y");
  }

  /**
   * Gets the rule in internal format
   *
   * @return the rule in internal format
   */
  public String toStringInternal() {
    return m_attributeNameOrIndex + "@@SR@@" + (m_descending ? "Y" : "N");
  }

  /**
   * Renders the rule in human readable format
   *
   * @return a human readable formatted rule
   */
  @Override
  public String toString() {
    return "Attribute: " + m_attributeNameOrIndex + " - sort "
      + (m_descending ? "descending" : "ascending");
  }

  /**
   * Set the name or index of the attribute to sort on
   *
   * @param att the name or index of the attribute to sort on
   */
  public void setAttribute(String att) {
    m_attributeNameOrIndex = att;
  }

  /**
   * Get the name or index of the attribute to sort on
   *
   * @return the name or index of the attribute to sort on
   */
  public String getAttribute() {
    return m_attributeNameOrIndex;
  }

  /**
   * Set whether the sort should be descending rather than ascending
   *
   * @param d true for a descending sort
   */
  public void setDescending(boolean d) {
    m_descending = d;
  }

  /**
   * Return true if the sort is descending
   *
   * @return true if the sort is descending
   */
  public boolean getDescending() {
    return m_descending;
  }

  /**
   * Resolve the configured attribute name/index against the supplied
   * structure, after applying environment variable substitution.
   *
   * @param env the environment variables to use
   * @param structure the structure of the instances that the rule will
   *          operate on
   * @throws IllegalArgumentException if the attribute cannot be located
   */
  public void init(Environment env, Instances structure) {
    String resolved = m_attributeNameOrIndex;
    try {
      resolved = env.substitute(resolved);
    } catch (Exception ex) {
      // substitution failure - fall back to the unsubstituted spec
    }

    if (resolved.equalsIgnoreCase("/first")) {
      m_attribute = structure.attribute(0);
      return;
    }
    if (resolved.equalsIgnoreCase("/last")) {
      m_attribute = structure.attribute(structure.numAttributes() - 1);
      return;
    }

    // try as a named attribute first, then fall back to a numeric index
    m_attribute = structure.attribute(resolved);
    if (m_attribute == null) {
      try {
        m_attribute = structure.attribute(Integer.parseInt(resolved));
      } catch (NumberFormatException n) {
        throw new IllegalArgumentException("Unable to locate attribute "
          + resolved + " as either a named attribute or as a valid "
          + "attribute index");
      }
    }
  }

  /**
   * Compare two instances according to the rule. Instances with a missing
   * value for the rule's attribute always sort to the end, regardless of
   * whether order is ascending or descending.
   *
   * @param o1 the first instance
   * @param o2 the second instance
   * @return the result of the comparison
   */
  @Override
  public int compare(InstanceHolder o1, InstanceHolder o2) {
    boolean firstMissing = o1.m_instance.isMissing(m_attribute);
    boolean secondMissing = o2.m_instance.isMissing(m_attribute);

    if (firstMissing && secondMissing) {
      return 0;
    }
    if (firstMissing) {
      return 1;
    }
    if (secondMissing) {
      return -1;
    }

    int result;
    if (m_attribute.isString()) {
      // string values are cached per-instance for incremental operation
      // TODO case insensitive?
      result = o1.m_stringVals.get(m_attribute.name())
        .compareTo(o2.m_stringVals.get(m_attribute.name()));
    } else if (m_attribute.isRelationValued()) {
      throw new IllegalArgumentException("Can't sort according to "
        + "relation-valued attribute values!");
    } else {
      result = Double.compare(o1.m_instance.value(m_attribute),
        o2.m_instance.value(m_attribute));
    }

    return m_descending ? -result : result;
  }
}
/**
 * Return the fully qualified name of a custom editor component (JComponent)
 * to use for editing the properties of the step. This method can return null,
 * in which case the system will dynamically generate an editor using the
 * GenericObjectEditor. This step provides a dedicated editor dialog.
 *
 * @return the fully qualified name of a step editor component
 */
@Override
public String getCustomEditorForStep() {
  return "weka.gui.knowledgeflow.steps.SorterStepEditorDialog";
}
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/Step.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* Step.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import java.util.List;
import java.util.Map;
import weka.core.Defaults;
import weka.core.Instances;
import weka.core.WekaException;
import weka.gui.knowledgeflow.StepInteractiveViewer;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
/**
 * Client API for Knowledge Flow steps. Typically, an implementation would
 * extend BaseStep. A minimal subset of the methods in this class that a simple
 * implementation extending BaseStep would need to address is also specified in
 * the BaseStepExtender interface.
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision: $
 * @see BaseStep
 * @see BaseStepExtender
 */
public interface Step {

  /**
   * Get the step manager in use with this step
   *
   * @return the step manager
   */
  StepManager getStepManager();

  /**
   * Set the step manager to use with this step. The execution environment will
   * call this method to provide a StepManager.
   *
   * @param manager the step manager to use
   */
  void setStepManager(StepManager manager);

  /**
   * Get the name of this step
   *
   * @return the name of this step
   */
  String getName();

  /**
   * Set the name for this step
   *
   * @param name the name for this step
   */
  void setName(String name);

  /**
   * Initialize the step. Called before flow execution begins.
   *
   * @throws WekaException if a problem occurs during initialization
   */
  void stepInit() throws WekaException;

  /**
   * Get a list of incoming connection types that this step can accept. Ideally
   * (and if appropriate), this should take into account the state of the step
   * and any existing incoming connections. E.g. a step might be able to accept
   * one (and only one) incoming batch data connection.
   *
   * @return a list of incoming connections that this step can accept given its
   *         current state
   */
  List<String> getIncomingConnectionTypes();

  /**
   * Get a list of outgoing connection types that this step can produce. Ideally
   * (and if appropriate), this should take into account the state of the step
   * and the incoming connections. E.g. depending on what incoming connection is
   * present, a step might be able to produce a trainingSet output, a testSet
   * output or neither, but not both.
   *
   * @return a list of outgoing connections that this step can produce
   */
  List<String> getOutgoingConnectionTypes();

  /**
   * Start executing (if this component is a start point)
   *
   * @throws WekaException if a problem occurs
   */
  void start() throws WekaException;

  /**
   * Request a stop to all processing by this step (as soon as possible)
   */
  void stop();

  /**
   * Process an incoming data payload (if the step accepts incoming connections)
   *
   * @param data the data to process
   * @throws WekaException if a problem occurs
   * @throws InterruptedException if processing is interrupted (e.g. in
   *           response to a stop request)
   */
  void processIncoming(Data data) throws WekaException, InterruptedException;

  /**
   * If possible, get the output structure for the named connection type as a
   * header-only set of instances. Can return null if the specified connection
   * type is not representable as Instances or cannot be determined at present.
   *
   * @param connectionName the name of the connection type to get the output
   *          structure for
   * @return the output structure as a header-only Instances object
   * @throws WekaException if a problem occurs
   */
  Instances outputStructureForConnectionType(String connectionName) throws WekaException;

  /**
   * Return the fully qualified name of a custom editor component (JComponent)
   * to use for editing the properties of the step. This method can return null,
   * in which case the system will dynamically generate an editor using the
   * GenericObjectEditor
   *
   * @return the fully qualified name of a step editor component
   */
  String getCustomEditorForStep();

  /**
   * When running in a graphical execution environment a step can make one or
   * more popup Viewer components available. These might be used to display
   * results, graphics etc. Returning null indicates that the step has no such
   * additional graphical views. The map returned by this method should be keyed
   * by action name (e.g. "View results"), and values should be fully qualified
   * names of the corresponding StepInteractiveView implementation. Furthermore,
   * the contents of this map can (and should) be dependent on whether a
   * particular viewer should be made available - i.e. if execution hasn't
   * occurred yet, or if a particular incoming connection type is not present,
   * then it might not be possible to view certain results.
   *
   * Viewers can implement StepInteractiveView directly (in which case they need
   * to extends JPanel), or extends the AbstractInteractiveViewer class. The
   * later extends JPanel, uses a BorderLayout, provides a "Close" button and a
   * method to add additional buttons.
   *
   * @return a map of viewer component names, or null if this step has no
   *         graphical views
   */
  Map<String, String> getInteractiveViewers();

  /**
   * An alternative to getStepInteractiveViewers that returns a Map of
   * instantiated StepInteractiveViewer objects. Generally,
   * getInteractiveViewers() is the preferred mechanism to specify any
   * interactive viewers, as it does not require Steps to import and instantiate
   * GUI classes. However, in some cases it might be unavoidable (e.g. Groovy
   * script compilation involves custom classloaders), in these cases this
   * method can be used instead.
   *
   * @return a map of instantiated instances of StepInteractiveViewers
   */
  Map<String, StepInteractiveViewer> getInteractiveViewersImpls();

  /**
   * Get default settings for the step (if any). Returning null indicates that
   * the step has no user-editable defaults.
   *
   * @return the default settings
   */
  Defaults getDefaultSettings();
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/StorePropertiesInEnvironment.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* StorePropertiesInEnvironment.java
* Copyright (C) 2016 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import weka.core.Attribute;
import weka.core.Environment;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.WekaException;
import weka.gui.ProgrammaticProperty;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.JobEnvironment;
import weka.knowledgeflow.StepManager;
/**
 * Stores property values specified in incoming instances in the flow
 * environment. Each configured property maps an incoming attribute value to a
 * (target step, property path, default value) triple that downstream Job
 * sub-flows can consume via a SetPropertiesFromEnvironment step.
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision: $
 */
@KFStep(
  name = "StorePropertiesInEnvironment",
  category = "Flow",
  toolTipText = "Store property settings for a particular algorithm-based step "
    + "(eg Classifier, Clusterer etc) in the flow environment. When connected "
    + "to a downstream Job step, the sub-flow executed by the Job can use a "
    + "SetPropertiesFromEnvironment step to access the stored properties and "
    + "set them on the underlying scheme in an algorithm-based step. Each property "
    + "is configured by specifying the attribute in the incoming instance to obtain "
    + "its value from, the target scheme-based step (in the sub-flow) that will "
    + "receive it, the property name/path to set on the target step and a default "
    + "property value (optional) to use if the value is missing in the incoming "
    + "instance. If the property/path field is left blank, then it is assumed that "
    + "the value is actually a scheme + options spec in command-line form; otherwise, "
    + "the value is set by processing the property path - e.g. if our target step "
    + "to receive property settings was Bagging (itself with default settings), and "
    + "the property path to set was 'classifier.maxDepth', then the classifier property "
    + "of Bagging would yield a REPTree base classifier and the maxDepth property of "
    + "REPTree would be set. Note that the SetPropertiesFromEnvironment step will "
    + "process property settings in the order that they are defined by this step. This "
    + "means that it is possible to set the entire base learner for a Classifier step"
    + "with one property setting and then drill down to a particular option in the "
    + "base learner using a second property setting.",
  iconPath = KFGUIConsts.BASE_ICON_PATH + "StorePropertiesInEnvironment.gif")
public class StorePropertiesInEnvironment extends BaseStep {

  /** For serialization */
  private static final long serialVersionUID = -1526289154505863542L;

  /** Separators for internal variable specification */
  public static final String SEP1 = "@@vv@@";
  public static final String SEP2 = "@a@a";

  /**
   * Map of properties to set based on the values of attributes in incoming
   * instances. Keyed by attribute name/index. List contains target step name,
   * property path (can be empty string to indicate a command line spec for a
   * complete base-scheme config), default property value. If an incoming
   * attribute value is missing, and no default property value is available, an
   * exception will be generated.
   */
  protected Map<String, List<String>> m_propsToSetFromIncomingInstances =
    new LinkedHashMap<>();

  /** True if the structure has been checked */
  protected boolean m_structureCheckComplete;

  /**
   * OK if there is at least one specified attribute in the incoming instance
   * structure
   */
  protected boolean m_structureOK;

  /** Internal string-based representation of property configs */
  protected String m_internalRep = "";

  /**
   * True if an exception should be raised when an attribute value is missing
   * in the current instance and no default value has been supplied
   */
  protected boolean m_raiseErrorWhenValueMissing;

  /**
   * Set the internal (string) representation of the property configurations.
   * Programmatic property - set by the custom editor dialog.
   *
   * @param rep the internal representation to use
   */
  @ProgrammaticProperty
  public void setPropsInternalRep(String rep) {
    m_internalRep = rep;
  }

  /**
   * Get the internal (string) representation of the property configurations
   *
   * @return the internal representation
   */
  public String getPropsInternalRep() {
    return m_internalRep;
  }

  /**
   * Initialize the step: parse the internal property representation and make
   * sure the execution environment supports (job) environment variables.
   *
   * @throws WekaException if the execution environment has no variable support
   */
  @Override
  public void stepInit() throws WekaException {
    m_structureCheckComplete = false;
    m_structureOK = false;
    m_propsToSetFromIncomingInstances = internalDynamicToMap(m_internalRep);
    Environment currentEnv =
      getStepManager().getExecutionEnvironment().getEnvironmentVariables();
    if (currentEnv == null) {
      throw new WekaException(
        "The execution environment doesn't seem to have any support for variables");
    }
    // ensure the environment can carry step properties (JobEnvironment),
    // wrapping the current one if necessary
    if (!(currentEnv instanceof JobEnvironment)) {
      currentEnv = new JobEnvironment(currentEnv);
      getStepManager().getExecutionEnvironment().setEnvironmentVariables(
        currentEnv);
    }
    if (getStepManager().numIncomingConnections() > 0
      && m_propsToSetFromIncomingInstances.size() == 0) {
      getStepManager().logWarning(
        "Incoming data detected, but no properties to "
          + "set from incoming instances have been defined.");
    }
  }

  /**
   * Process incoming data. Handles both incremental (instance/environment)
   * connections and batch (dataset/trainingSet/testSet) connections; each
   * instance processed results in an outgoing environment Data object.
   *
   * @param data the data to process
   * @throws WekaException if a problem occurs
   */
  @Override
  public void processIncoming(Data data) throws WekaException {
    if (!m_structureCheckComplete) {
      // verify (once) that at least some configured attributes exist in the
      // incoming structure
      m_structureCheckComplete = true;
      Instances structure = null;
      if (data.getConnectionName().equals(StepManager.CON_INSTANCE)) {
        structure = ((Instance) data.getPrimaryPayload()).dataset();
      } else if (data.getConnectionName().equals(StepManager.CON_ENVIRONMENT)) {
        structure =
          ((Instance) data.getPayloadElement(StepManager.CON_AUX_DATA_INSTANCE))
            .dataset();
      } else {
        structure = data.getPrimaryPayload();
      }
      checkStructure(structure);
    }
    getStepManager().processing();
    if (data.getConnectionName().equals(StepManager.CON_INSTANCE)
      || data.getConnectionName().equals(StepManager.CON_ENVIRONMENT)) {
      // incremental case
      if (isStopRequested()) {
        getStepManager().interrupted();
        return;
      }
      if (getStepManager().isStreamFinished(data)) {
        // propagate end-of-stream, carrying forward any existing environment
        // variables/properties from an incoming environment connection
        Data finished = new Data(StepManager.CON_ENVIRONMENT);
        if (data.getConnectionName().equals(StepManager.CON_ENVIRONMENT)) {
          finished
            .setPayloadElement(
              StepManager.CON_AUX_DATA_ENVIRONMENT_VARIABLES,
              data
                .getPayloadElement(StepManager.CON_AUX_DATA_ENVIRONMENT_VARIABLES));
          finished
            .setPayloadElement(
              StepManager.CON_AUX_DATA_ENVIRONMENT_PROPERTIES,
              data
                .getPayloadElement(StepManager.CON_AUX_DATA_ENVIRONMENT_PROPERTIES));
        }
        getStepManager().throughputFinished(finished);
        return;
      }
      Instance toProcess =
        (Instance) (data.getConnectionName().equals(StepManager.CON_INSTANCE) ? data
          .getPrimaryPayload() : data
          .getPayloadElement(StepManager.CON_AUX_DATA_INSTANCE));
      getStepManager().throughputUpdateStart();
      processInstance(toProcess,
        data.getConnectionName().equals(StepManager.CON_ENVIRONMENT) ? data
          : null);
      getStepManager().throughputUpdateEnd();
    } else {
      // batch case - process each instance in turn
      Instances insts = data.getPrimaryPayload();
      for (int i = 0; i < insts.numInstances(); i++) {
        if (isStopRequested()) {
          break;
        }
        processInstance(insts.instance(i), null);
        // NOTE(review): throughputFinished() is called once per instance here,
        // inside the loop - confirm this is intended rather than a single
        // call after the loop completes
        Data finished = new Data(StepManager.CON_ENVIRONMENT);
        getStepManager().throughputFinished(finished);
      }
      if (isStopRequested()) {
        getStepManager().interrupted();
      }
    }
  }

  /**
   * Extract configured property values from a single instance, store them in
   * the JobEnvironment and emit them via an outgoing environment connection.
   *
   * @param inst the instance to extract property values from
   * @param existingEnv incoming environment Data to merge/forward (may be null)
   * @throws WekaException if a value is missing, no default was supplied and
   *           m_raiseErrorWhenValueMissing is true
   */
  protected void processInstance(Instance inst, Data existingEnv)
    throws WekaException {
    Map<String, Map<String, String>> props = new HashMap<>();
    for (Map.Entry<String, List<String>> e : m_propsToSetFromIncomingInstances
      .entrySet()) {
      String attName = environmentSubstitute(e.getKey());
      Attribute current = inst.dataset().attribute(attName);
      int index = -1;
      if (current != null) {
        index = current.index();
      } else {
        // try as a 1-based index
        try {
          index = Integer.parseInt(attName);
          index--; // make zero-based
        } catch (NumberFormatException ex) {
          // ignore
        }
      }
      if (index != -1) {
        String stepName = environmentSubstitute(e.getValue().get(0));
        String propToSet = environmentSubstitute(e.getValue().get(1));
        String val = environmentSubstitute(e.getValue().get(2));
        if (inst.isMissing(index)) {
          // keep the (possibly empty) default value; optionally fail hard
          if (val.length() == 0 && m_raiseErrorWhenValueMissing) {
            throw new WekaException("Value of attribute '"
              + inst.attribute(index).name()
              + "' was missing in current instance and no default value has "
              + "been specified");
          }
        } else {
          val = inst.stringValue(index);
        }
        Map<String, String> propsForStep = props.get(stepName);
        if (propsForStep == null) {
          propsForStep = new LinkedHashMap<>();
          props.put(stepName, propsForStep);
        }
        propsForStep.put(propToSet, val);
        getStepManager().logDebug(
          "Storing property '" + propToSet + "' for step " + "'" + stepName
            + "' with value '" + val + "'");
      }
    }
    JobEnvironment env =
      (JobEnvironment) getStepManager().getExecutionEnvironment()
        .getEnvironmentVariables();
    env.addToStepProperties(props);
    if (existingEnv != null) {
      // merge in any upstream properties; note that putAll means upstream
      // entries override ones computed from this instance on key clashes
      Map<String, Map<String, String>> existingProps =
        existingEnv
          .getPayloadElement(StepManager.CON_AUX_DATA_ENVIRONMENT_PROPERTIES);
      if (existingProps != null) {
        props.putAll(existingProps);
      }
    }
    Data output = new Data(StepManager.CON_ENVIRONMENT);
    output.setPayloadElement(StepManager.CON_AUX_DATA_ENVIRONMENT_PROPERTIES,
      props);
    if (existingEnv != null) {
      output.setPayloadElement(StepManager.CON_AUX_DATA_ENVIRONMENT_VARIABLES,
        existingEnv
          .getPayloadElement(StepManager.CON_AUX_DATA_ENVIRONMENT_VARIABLES));
    }
    output.setPayloadElement(StepManager.CON_AUX_DATA_INSTANCE, inst);
    output.setPayloadElement(StepManager.CON_AUX_DATA_IS_INCREMENTAL, true);
    getStepManager().outputData(output);
  }

  /**
   * Check the incoming instance structure against the configured attributes,
   * logging warnings for attributes that cannot be found. Sets m_structureOK
   * if at least one configured attribute is present.
   *
   * @param structure the incoming instance structure
   */
  protected void checkStructure(Instances structure) {
    List<String> notFoundInIncoming = new ArrayList<>();
    for (String attName : m_propsToSetFromIncomingInstances.keySet()) {
      if (structure.attribute(attName) == null) {
        notFoundInIncoming.add(attName);
      } else {
        m_structureOK = true;
      }
    }
    if (notFoundInIncoming.size() == m_propsToSetFromIncomingInstances.size()) {
      getStepManager().logWarning(
        "None of the specified attributes appear to be "
          + "in the incoming instance structure");
      return;
    }
    for (String s : notFoundInIncoming) {
      getStepManager().logWarning(
        "Attribute '" + s + "' was not found in the "
          + "incoming instance structure");
    }
  }

  /**
   * Get the incoming connection types accepted: one of dataset, trainingSet,
   * testSet, instance or environment (only while no connection exists yet).
   *
   * @return a list of acceptable incoming connection types
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    if (getStepManager().numIncomingConnections() == 0) {
      return Arrays.asList(StepManager.CON_DATASET,
        StepManager.CON_TRAININGSET, StepManager.CON_TESTSET,
        StepManager.CON_INSTANCE, StepManager.CON_ENVIRONMENT);
    }
    return new ArrayList<>();
  }

  /**
   * Get the outgoing connection types produced: an environment connection,
   * available only once an incoming connection is present.
   *
   * @return a list of outgoing connection types
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    if (getStepManager().numIncomingConnections() != 0) {
      return Arrays.asList(StepManager.CON_ENVIRONMENT);
    }
    return new ArrayList<>();
  }

  /**
   * Return the fully qualified name of a custom editor component (JComponent)
   * to use for editing the properties of the step. This method can return null,
   * in which case the system will dynamically generate an editor using the
   * GenericObjectEditor
   *
   * @return the fully qualified name of a step editor component
   */
  @Override
  public String getCustomEditorForStep() {
    return "weka.gui.knowledgeflow.steps.StorePropertiesInEnvironmentStepEditorDialog";
  }

  /**
   * Parse the internal string representation of the property configurations
   * into a map keyed by attribute name/index. Each value is a three-element
   * list: target step name, property path and default value. Entries with an
   * empty attribute or step name are skipped.
   *
   * @param internalRep the internal representation to parse
   * @return a (insertion-ordered) map of property configurations
   */
  public static Map<String, List<String>> internalDynamicToMap(
    String internalRep) {
    Map<String, List<String>> propsToSet = new LinkedHashMap<>();
    if (internalRep != null && internalRep.length() > 0) {
      String[] parts = internalRep.split(SEP1);
      for (String p : parts) {
        String[] attVal = p.split(SEP2);
        if (attVal.length == 4) {
          String attName = attVal[0].trim();
          String stepName = attVal[1].trim();
          String propName = attVal[2].trim();
          String defVal = attVal[3].trim();
          if (attName.length() > 0 && stepName.length() > 0) {
            List<String> stepAndDefL = new ArrayList<>();
            stepAndDefL.add(stepName);
            stepAndDefL.add(propName);
            stepAndDefL.add(defVal);
            propsToSet.put(attName, stepAndDefL);
          }
        }
      }
    }
    return propsToSet;
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/StripChart.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* StripChart.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Instance;
import weka.core.WekaException;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
* A step that can display a viewer showing a right-to-left scrolling chart
* for streaming data
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(name = "StripChart", category = "Visualization",
toolTipText = "Plot streaming data", iconPath = KFGUIConsts.BASE_ICON_PATH
+ "StripChart.gif")
public class StripChart extends BaseStep {
private static final long serialVersionUID = -2569383350174947630L;
/** Parties interested in knowing about updates */
protected List<PlotNotificationListener> m_plotListeners =
new ArrayList<PlotNotificationListener>();
/** Frequency for plotting x values */
protected int m_xValFreq = 500;
/**
* Plot every m_refreshFrequency'th point
*/
private int m_refreshFrequency = 5;
private int m_userRefreshWidth = 1;
/** True if we've been reset */
protected boolean m_reset;
/**
* Holds the number of attribute values (10 max) to plot if processing an
* incoming instance stream
*/
protected int m_instanceWidth;
/**
* GUI Tip text
*
* @return the tip text for this option
*/
public String xLabelFreqTipText() {
return "Show x axis labels this often";
}
/**
* Get the x label frequency
*
* @return the x label frequency
*/
public int getXLabelFreq() {
return m_xValFreq;
}
/**
* Set the x label frequency
*
* @param freq the x label frequency
*/
public void setXLabelFreq(int freq) {
m_xValFreq = freq;
}
/**
* GUI Tip text
*
* @return a <code>String</code> value
*/
public String refreshFreqTipText() {
return "Plot every x'th data point";
}
/**
* Set how often (in x axis points) to refresh the display
*
* @param freq an <code>int</code> value
*/
public void setRefreshFreq(int freq) {
m_refreshFrequency = freq;
}
/**
* Get the refresh frequency
*
* @return an <code>int</code> value
*/
public int getRefreshFreq() {
return m_refreshFrequency;
}
/**
* GUI Tip text
*
* @return a <code>String</code> value
*/
public String refreshWidthTipText() {
return "The number of pixels to shift the plot by every time a point"
+ " is plotted.";
}
/**
* Set how many pixels to shift the plot by every time a point is plotted
*
* @param width the number of pixels to shift the plot by
*/
public void setRefreshWidth(int width) {
if (width > 0) {
m_userRefreshWidth = width;
}
}
/**
* Get how many pixels to shift the plot by every time a point is plotted
*
* @return the number of pixels to shift the plot by
*/
public int getRefreshWidth() {
return m_userRefreshWidth;
}
@Override
public void stepInit() throws WekaException {
m_reset = true;
}
/**
* Process an incoming data payload (if the step accepts incoming connections)
*
* @param data the data to process
* @throws WekaException if a problem occurs
*/
@SuppressWarnings("unchecked")
@Override
public synchronized void processIncoming(Data data) throws WekaException {
if (isStopRequested()) {
return;
}
if (getStepManager().isStreamFinished(data)) {
// done
// notify downstream steps of end of stream
Data d = new Data(data.getConnectionName());
getStepManager().throughputFinished(d);
return;
}
getStepManager().throughputUpdateStart();
if (m_plotListeners.size() > 0) {
if (getStepManager().numIncomingConnectionsOfType(
StepManager.CON_INSTANCE) > 0) {
Instance instance =
(Instance) data.getPayloadElement(StepManager.CON_INSTANCE);
if (m_reset) {
m_reset = false;
List<String> legendEntries = new ArrayList<String>();
int i;
for (i = 0; i < instance.dataset().numAttributes() && i < 10; i++) {
legendEntries.add(instance.dataset().attribute(i).name());
}
m_instanceWidth = i;
for (PlotNotificationListener l : m_plotListeners) {
l.setLegend(legendEntries, 0.0, 1.0);
}
}
double[] dataPoint = new double[m_instanceWidth];
for (int i = 0; i < dataPoint.length; i++) {
if (!instance.isMissing(i)) {
dataPoint[i] = instance.value(i);
}
}
for (PlotNotificationListener l : m_plotListeners) {
l.acceptDataPoint(dataPoint);
}
} else if (getStepManager().numIncomingConnectionsOfType(
StepManager.CON_CHART) > 0) {
if (m_reset) {
m_reset = false;
double min =
data.getPayloadElement(StepManager.CON_AUX_DATA_CHART_MIN, 0.0);
double max =
data.getPayloadElement(StepManager.CON_AUX_DATA_CHART_MAX, 1.0);
List<String> legend =
(List<String>) data
.getPayloadElement(StepManager.CON_AUX_DATA_CHART_LEGEND);
for (PlotNotificationListener l : m_plotListeners) {
l.setLegend(legend, min, max);
}
}
double[] dataPoint =
(double[]) data
.getPayloadElement(StepManager.CON_AUX_DATA_CHART_DATA_POINT);
for (PlotNotificationListener l : m_plotListeners) {
l.acceptDataPoint(dataPoint);
}
}
}
getStepManager().throughputUpdateEnd();
}
/**
* Get a list of incoming connection types that this step can accept. Ideally
* (and if appropriate), this should take into account the state of the step
* and any existing incoming connections. E.g. a step might be able to accept
* one (and only one) incoming batch data connection.
*
* @return a list of incoming connections that this step can accept given its
* current state
*/
@Override
public List<String> getIncomingConnectionTypes() {
if (getStepManager().numIncomingConnections() == 0) {
return Arrays.asList(StepManager.CON_INSTANCE, StepManager.CON_CHART);
}
return new ArrayList<String>();
}
/**
* Add a plot notification listener
*
* @param listener the listener to be notified
*/
public synchronized void addPlotNotificationListener(
PlotNotificationListener listener) {
m_plotListeners.add(listener);
}
/**
* Remove a plot notification listener
*
* @param l the listener to remove
*/
public synchronized void removePlotNotificationListener(
PlotNotificationListener l) {
m_plotListeners.remove(l);
}
/**
 * Returns the outgoing connection types this step can produce. This step is
 * a terminal visualization, so it never produces any outgoing connections.
 *
 * @return an empty list - no outgoing connections are produced
 */
@Override
public List<String> getOutgoingConnectionTypes() {
  // terminal step: nothing flows downstream from a chart
  return new ArrayList<String>();
}
/**
 * Supplies the popup viewer(s) available for this step when running in a
 * graphical execution environment. The returned map is keyed by action name
 * and maps to the fully qualified class name of the viewer implementation.
 *
 * @return a map containing the single "Show chart" viewer entry
 */
@Override
public Map<String, String> getInteractiveViewers() {
  Map<String, String> viewers = new LinkedHashMap<String, String>();
  viewers.put("Show chart",
    "weka.gui.knowledgeflow.steps.StripChartInteractiveView");
  return viewers;
}
/**
 * StripChartInteractiveView implements this in order to receive data points.
 * Other potential viewer implementations could as well.
 */
public interface PlotNotificationListener {

  /**
   * Supply the legend entries and the initial [min, max] range for the plot.
   *
   * @param legendEntries one label per data series
   * @param min the initial minimum of the plotted range
   * @param max the initial maximum of the plotted range
   */
  void setLegend(List<String> legendEntries, double min, double max);

  /**
   * Accept a single data point (one value per series).
   *
   * @param dataPoint the values to plot
   */
  void acceptDataPoint(double[] dataPoint);
}
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/SubstringLabeler.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* SubstringLabeler.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Environment;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.OptionMetadata;
import weka.core.WekaException;
import weka.filters.unsupervised.attribute.Add;
import weka.gui.ProgrammaticProperty;
import weka.gui.beans.SubstringLabelerRules;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import weka.knowledgeflow.StepManagerImpl;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/**
* Step that appends a label to incoming instances according to substring
* matches in string attributes. Multiple match "rules" can be
* specified - these get applied in the order that they are defined. Each rule
* can be applied to one or more user-specified input String attributes.
* Attributes can be specified using either a range list (e.g 1,2-10,last) or by
* a comma separated list of attribute names (where "/first" and "/last" are
* special strings indicating the first and last attribute respectively).
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(name = "SubstringLabeler", category = "Tools",
  toolTipText = "Label instances according to substring matches in String "
    + "attributes "
    + "The user can specify the attributes to match "
    + "against and associated label to create by defining 'match' rules. A new attribute is appended "
    + "to the data to contain the label. Rules are applied in order when processing instances, and the "
    + "label associated with the first matching rule is applied. Non-matching instances can either receive "
    + "a missing value for the label attribute or be 'consumed' (i.e. they are not output).",
  iconPath = KFGUIConsts.BASE_ICON_PATH + "DefaultFilter.gif")
public class SubstringLabeler extends BaseStep {

  /** For serialization */
  private static final long serialVersionUID = 1409175779108600014L;

  /** Internally encoded list of match rules */
  protected String m_matchDetails = "";

  /** Encapsulates our match rules (rebuilt on each run - hence transient) */
  protected transient SubstringLabelerRules m_matches;

  /**
   * Whether to make the binary match/non-match attribute a nominal (rather than
   * numeric) binary attribute.
   */
  protected boolean m_nominalBinary;

  /**
   * For multi-valued labeled rules, whether or not to consume non-matching
   * instances or output them with missing value for the match attribute.
   */
  protected boolean m_consumeNonMatchingInstances;

  /** Add filter for adding the new attribute */
  protected Add m_addFilter;

  /** Name of the new attribute */
  protected String m_attName = "Match";

  /** Step has been reset - i.e. start of processing? */
  protected boolean m_isReset;

  /** Reusable data object for output */
  protected Data m_streamingData;

  /** Streaming instances? */
  protected boolean m_streaming;

  /**
   * Set internally encoded list of match rules
   *
   * @param details the list of match rules
   */
  @ProgrammaticProperty
  public void setMatchDetails(String details) {
    m_matchDetails = details;
  }

  /**
   * Get the internally encoded list of match rules
   *
   * @return the match rules
   */
  public String getMatchDetails() {
    return m_matchDetails;
  }

  /**
   * Set whether the new attribute created should be a nominal binary attribute
   * rather than a numeric binary attribute.
   *
   * @param nom true if the attribute should be a nominal binary one
   */
  @OptionMetadata(displayName = "Make a nominal binary attribute",
    description = "Whether to encode the new attribute as nominal "
      + "when it is binary (as opposed to numeric)", displayOrder = 1)
  public void setNominalBinary(boolean nom) {
    m_nominalBinary = nom;
  }

  /**
   * Get whether the new attribute created should be a nominal binary attribute
   * rather than a numeric binary attribute.
   *
   * @return true if the attribute should be a nominal binary one
   */
  public boolean getNominalBinary() {
    return m_nominalBinary;
  }

  /**
   * Set whether instances that do not match any of the rules should be
   * "consumed" rather than output with a missing value set for the new
   * attribute.
   *
   * @param consume true if non matching instances should be consumed by the
   *          component.
   */
  @OptionMetadata(displayName = "Consume non matching instances",
    description = "Instances that do not match any rules will be consumed, "
      + "rather than being output with a missing value for the new attribute",
    displayOrder = 2)
  public void setConsumeNonMatching(boolean consume) {
    m_consumeNonMatchingInstances = consume;
  }

  /**
   * Get whether instances that do not match any of the rules should be
   * "consumed" rather than output with a missing value set for the new
   * attribute.
   *
   * @return true if non matching instances should be consumed by the component.
   */
  public boolean getConsumeNonMatching() {
    return m_consumeNonMatchingInstances;
  }

  /**
   * Set the name of the new attribute that is created to indicate the match
   *
   * @param name the name of the new attribute
   */
  @OptionMetadata(displayName = "Name of the new attribute",
    description = "Name to give the new attribute", displayOrder = 0)
  public void setMatchAttributeName(String name) {
    m_attName = name;
  }

  /**
   * Get the name of the new attribute that is created to indicate the match
   *
   * @return the name of the new attribute
   */
  public String getMatchAttributeName() {
    return m_attName;
  }

  /**
   * Initialize the step
   *
   * @throws WekaException if a problem occurs
   */
  @Override
  public void stepInit() throws WekaException {
    m_isReset = true;
    m_streamingData = new Data(StepManager.CON_INSTANCE);
    m_streaming = false;
  }

  /**
   * Get a list of incoming connection types that this step can accept. Only
   * one incoming connection (instance, dataSet, trainingSet or testSet) is
   * permitted.
   *
   * @return a list of incoming connections that this step can accept given its
   *         current state
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    return getStepManager().numIncomingConnections() == 0 ? Arrays.asList(
      StepManager.CON_INSTANCE, StepManager.CON_DATASET,
      StepManager.CON_TRAININGSET, StepManager.CON_TESTSET) : null;
  }

  /**
   * Get a list of outgoing connection types that this step can produce. The
   * step mirrors whatever incoming connection type is present.
   *
   * @return a list of outgoing connections that this step can produce
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    List<String> result = new ArrayList<String>();
    for (Map.Entry<String, List<StepManager>> e : getStepManager()
      .getIncomingConnections().entrySet()) {
      if (e.getValue().size() > 0) {
        result.add(e.getKey());
      }
    }
    return result;
  }

  /**
   * Process an incoming data payload. On the first call the match rules are
   * compiled against the incoming structure; subsequent calls are routed to
   * either the streaming or the batch handler.
   *
   * @param data the data to process
   * @throws WekaException if a problem occurs
   */
  @Override
  public void processIncoming(Data data) throws WekaException {
    Instances structure;
    Instance inst;
    if (m_isReset) {
      if (getStepManager().numIncomingConnectionsOfType(
        StepManager.CON_INSTANCE) > 0) {
        inst = data.getPrimaryPayload();
        structure = inst.dataset();
        m_streaming = true;
      } else {
        structure = data.getPrimaryPayload();
        structure = new Instances(structure, 0);
      }
      try {
        m_matches =
          new SubstringLabelerRules(m_matchDetails, m_attName,
            getConsumeNonMatching(), getNominalBinary(), structure,
            ((StepManagerImpl) getStepManager()).stepStatusMessagePrefix(),
            getStepManager().getLog(), getStepManager()
              .getExecutionEnvironment().getEnvironmentVariables());
      } catch (Exception ex) {
        throw new WekaException(ex);
      }
      m_isReset = false;
    }

    if (m_streaming) {
      if (getStepManager().isStreamFinished(data)) {
        m_streamingData.clearPayload();
        getStepManager().throughputFinished(m_streamingData);
        return;
      } else {
        processStreaming(data);
      }
    } else {
      processBatch(data);
    }

    if (isStopRequested()) {
      getStepManager().interrupted();
    } else if (!m_streaming) {
      getStepManager().finished();
    }
  }

  /**
   * Processes a streaming data object
   *
   * @param data the data to process
   * @throws WekaException if a problem occurs
   */
  protected void processStreaming(Data data) throws WekaException {
    getStepManager().throughputUpdateStart();
    Instance toProcess = data.getPrimaryPayload();
    try {
      Instance result = m_matches.makeOutputInstance(toProcess, false);
      if (result != null) {
        m_streamingData.setPayloadElement(StepManager.CON_INSTANCE, result);
        getStepManager().outputData(m_streamingData);
      }
    } catch (Exception ex) {
      throw new WekaException(ex);
    } finally {
      // Always close the throughput measurement - previously
      // throughputUpdateEnd() was only invoked when an output instance was
      // produced, leaving the start/end calls unbalanced for consumed
      // (non-matching) instances and on exception. SubstringReplacer always
      // pairs these calls; this step now does too.
      getStepManager().throughputUpdateEnd();
    }
  }

  /**
   * Process a batch data object
   *
   * @param data the data to process
   * @throws WekaException if a problem occurs
   */
  protected void processBatch(Data data) throws WekaException {
    if (isStopRequested()) {
      return;
    }

    Instances batch = data.getPrimaryPayload();
    for (int i = 0; i < batch.numInstances(); i++) {
      Instance current = batch.instance(i);
      Instance result = null;
      try {
        result = m_matches.makeOutputInstance(current, true);
      } catch (Exception ex) {
        // NOTE(review): per-instance failures are logged to stderr and the
        // instance is skipped (best-effort batch processing) - confirm this
        // is preferred over aborting the batch with a WekaException
        ex.printStackTrace();
      }
      if (isStopRequested()) {
        return;
      }
      if (result != null) {
        m_matches.getOutputStructure().add(result);
      }
    }

    Data outputD =
      new Data(data.getConnectionName(), m_matches.getOutputStructure());
    // propagate set numbering so downstream steps can track cross-validation
    // folds etc.
    outputD.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM,
      data.getPayloadElement(StepManager.CON_AUX_DATA_SET_NUM));
    outputD.setPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM,
      data.getPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM));
    getStepManager().outputData(outputD);
  }

  /**
   * If possible, get the output structure for the named connection type as a
   * header-only set of instances. Can return null if the specified connection
   * type is not representable as Instances or cannot be determined at present.
   *
   * @param connectionName the name of the connection type to get the output
   *          structure for
   * @return the output structure as a header-only Instances object
   * @throws WekaException if a problem occurs
   */
  @Override
  public Instances outputStructureForConnectionType(String connectionName)
    throws WekaException {
    // we output the same structure as we receive + one additional attribute
    if (getStepManager().numIncomingConnections() > 0) {
      for (Map.Entry<String, List<StepManager>> e : getStepManager()
        .getIncomingConnections().entrySet()) {
        if (e.getValue().size() > 0) {
          StepManager incoming = e.getValue().get(0);
          String incomingConnType = e.getKey();
          Instances incomingStruc =
            getStepManager().getIncomingStructureFromStep(incoming,
              incomingConnType);
          if (incomingStruc == null) {
            return null;
          }
          try {
            SubstringLabelerRules rules =
              new SubstringLabelerRules(m_matchDetails, m_attName,
                getConsumeNonMatching(), getNominalBinary(), incomingStruc,
                ((StepManagerImpl) getStepManager()).stepStatusMessagePrefix(),
                null, Environment.getSystemWide());
            return rules.getOutputStructure();
          } catch (Exception ex) {
            throw new WekaException(ex);
          }
        }
      }
    }
    return null;
  }

  /**
   * Return the fully qualified name of a custom editor component (JComponent)
   * to use for editing the properties of the step.
   *
   * @return the fully qualified name of a step editor component
   */
  @Override
  public String getCustomEditorForStep() {
    return "weka.gui.knowledgeflow.steps.SubstringLabelerStepEditorDialog";
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/SubstringReplacer.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* SubstringReplacer.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.WekaException;
import weka.gui.ProgrammaticProperty;
import weka.gui.beans.SubstringReplacerRules;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import weka.knowledgeflow.StepManagerImpl;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/**
* A step that can replace sub-strings in the values of string attributes. Only
* operates in streaming mode. Multiple match and replace "rules" can be
* specified - these get applied in the order that they are defined. Each rule
* can be applied to one or more user-specified input String attributes.
* Attributes can be specified using either a range list (e.g 1,2-10,last) or by
* a comma separated list of attribute names (where "/first" and "/last" are
* special strings indicating the first and last attribute respectively).
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(name = "SubstringReplacer", category = "Tools",
  toolTipText = "Replace substrings in String attribute values using "
    + "either literal match-and-replace or regular expression "
    + "matching. The attributes to apply the match and replace "
    + "rules to can be selected via a range string (e.g. "
    + "1-5,6-last) or by a comma-separated list of attribute "
    + "names (/first and /last can be used to indicate the first "
    + "and last attribute respectively)", iconPath = KFGUIConsts.BASE_ICON_PATH
    + "DefaultFilter.gif")
public class SubstringReplacer extends BaseStep {

  /** For serialization */
  private static final long serialVersionUID = -8786642000811852824L;

  /** Internally encoded list of match-replace rules */
  protected String m_matchReplaceDetails = "";

  /** Handles the rules for replacement (rebuilt each run - hence transient) */
  protected transient SubstringReplacerRules m_mr;

  /** Reusable data object for output */
  protected Data m_streamingData;

  /** Step has been reset - i.e. start of processing */
  protected boolean m_isReset;

  /**
   * Set internally encoded list of match-replace rules
   *
   * @param details the list of match-replace rules
   */
  @ProgrammaticProperty
  public void setMatchReplaceDetails(String details) {
    m_matchReplaceDetails = details;
  }

  /**
   * Get the internally encoded list of match-replace rules
   *
   * @return the match-replace rules
   */
  public String getMatchReplaceDetails() {
    return m_matchReplaceDetails;
  }

  /**
   * Initialize the step
   *
   * @throws WekaException if a problem occurs
   */
  @Override
  public void stepInit() throws WekaException {
    m_isReset = true;
    m_streamingData = new Data(StepManager.CON_INSTANCE);
  }

  /**
   * Get a list of incoming connection types that this step can accept. Only a
   * single incoming "instance" (streaming) connection is permitted.
   *
   * @return a list of incoming connections that this step can accept given its
   *         current state
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    if (getStepManager().numIncomingConnections() == 0) {
      return Arrays.asList(StepManager.CON_INSTANCE);
    }
    return null;
  }

  /**
   * Get a list of outgoing connection types that this step can produce. An
   * outgoing "instance" connection is only available once an incoming
   * connection is present.
   *
   * @return a list of outgoing connections that this step can produce
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    if (getStepManager().numIncomingConnections() > 0) {
      return Arrays.asList(StepManager.CON_INSTANCE);
    }
    return null;
  }

  /**
   * Process an incoming data payload (if the step accepts incoming connections)
   *
   * @param data the data to process
   * @throws WekaException if a problem occurs
   */
  @Override
  public void processIncoming(Data data) throws WekaException {
    // Handle the end-of-stream marker before touching the payload: the
    // stream-end Data object carries no instance, so extracting the payload
    // and (on first call) dereferencing inst.dataset() first - as was done
    // previously - would throw a NullPointerException on an empty stream.
    if (getStepManager().isStreamFinished(data)) {
      m_streamingData.clearPayload();
      getStepManager().throughputFinished(m_streamingData);
      return;
    }

    Instance inst = data.getPrimaryPayload();
    if (m_isReset) {
      m_isReset = false;
      Instances structure = inst.dataset();
      m_mr =
        new SubstringReplacerRules(m_matchReplaceDetails, structure,
          ((StepManagerImpl) getStepManager()).stepStatusMessagePrefix(),
          getStepManager().getLog(), getStepManager().getExecutionEnvironment()
            .getEnvironmentVariables());
    }

    if (!isStopRequested()) {
      getStepManager().throughputUpdateStart();
      Instance outInst = m_mr.makeOutputInstance(inst);
      getStepManager().throughputUpdateEnd();
      m_streamingData.setPayloadElement(StepManager.CON_INSTANCE, outInst);
      getStepManager().outputData(m_streamingData);
    } else {
      getStepManager().interrupted();
    }
  }

  /**
   * If possible, get the output structure for the named connection type as a
   * header-only set of instances. Can return null if the specified connection
   * type is not representable as Instances or cannot be determined at present.
   *
   * @param connectionName the name of the connection type to get the output
   *          structure for
   * @return the output structure as a header-only Instances object
   * @throws WekaException if a problem occurs
   */
  @Override
  public Instances outputStructureForConnectionType(String connectionName)
    throws WekaException {
    // we output the same structure as we receive
    if (getStepManager().numIncomingConnections() > 0) {
      for (Map.Entry<String, List<StepManager>> e : getStepManager()
        .getIncomingConnections().entrySet()) {
        if (e.getValue().size() > 0) {
          StepManager incoming = e.getValue().get(0);
          String incomingConnType = e.getKey();
          return getStepManager().getIncomingStructureFromStep(incoming,
            incomingConnType);
        }
      }
    }
    return null;
  }

  /**
   * Return the fully qualified name of a custom editor component (JComponent)
   * to use for editing the properties of the step.
   *
   * @return the fully qualified name of a step editor component
   */
  @Override
  public String getCustomEditorForStep() {
    return "weka.gui.knowledgeflow.steps.SubstringReplacerStepEditorDialog";
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/TestSetMaker.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* TestSetMaker.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Instances;
import weka.core.WekaException;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* A step that makes an incoming dataSet or trainingSet into a testSet.
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
*/
@KFStep(name = "TestSetMaker", category = "Evaluation",
  toolTipText = "Make an incoming dataSet or trainingSet into a testSet",
  iconPath = KFGUIConsts.BASE_ICON_PATH + "TestSetMaker.gif")
public class TestSetMaker extends BaseStep {

  /** For serialization */
  private static final long serialVersionUID = 6384920860783839811L;

  /**
   * Initialize the step
   */
  @Override
  public void stepInit() {
    // nothing to do
  }

  /**
   * Process an incoming data payload: re-labels the incoming dataSet or
   * trainingSet as a testSet (set 1 of 1) and passes it downstream.
   *
   * @param data the data to process
   * @throws WekaException if the incoming payload contains no instances
   */
  @Override
  public void processIncoming(Data data) throws WekaException {
    getStepManager().processing();
    String incomingConnName = data.getConnectionName();
    Instances insts = (Instances) data.getPayloadElement(incomingConnName);
    if (insts == null) {
      throw new WekaException("Incoming instances should not be null!");
    }
    getStepManager().logBasic(
      "Creating a test set for relation " + insts.relationName());

    Data newData = new Data();
    newData.setPayloadElement(StepManager.CON_TESTSET, insts);
    // a single (non-cross-validated) test set
    newData.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, 1);
    newData.setPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM, 1);

    if (!isStopRequested()) {
      getStepManager().outputData(StepManager.CON_TESTSET, newData);
      getStepManager().finished();
    } else {
      // Consistent with the other steps in this package: report an
      // interruption rather than (incorrectly) signalling successful
      // completion after a stop request.
      getStepManager().interrupted();
    }
  }

  /**
   * Get a list of incoming connection types that this step can accept. Only
   * one incoming dataSet or trainingSet connection is permitted.
   *
   * @return a list of incoming connections that this step can accept given its
   *         current state
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    if (getStepManager().numIncomingConnections() == 0) {
      return Arrays
        .asList(StepManager.CON_DATASET, StepManager.CON_TRAININGSET);
    }
    return new ArrayList<String>();
  }

  /**
   * Get a list of outgoing connection types that this step can produce. A
   * testSet connection is only available once an incoming connection exists.
   *
   * @return a list of outgoing connections that this step can produce
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    if (getStepManager().numIncomingConnections() > 0) {
      return Arrays.asList(StepManager.CON_TESTSET);
    }
    return new ArrayList<String>();
  }

  /**
   * If possible, get the output structure for the named connection type as a
   * header-only set of instances. Can return null if the specified connection
   * type is not representable as Instances or cannot be determined at present.
   *
   * @param connectionName the name of the connection type to get the output
   *          structure for
   * @return the output structure as a header-only Instances object
   * @throws WekaException if a problem occurs
   */
  @Override
  public Instances outputStructureForConnectionType(String connectionName)
    throws WekaException {
    if (!connectionName.equals(StepManager.CON_TESTSET)
      || getStepManager().numIncomingConnections() == 0) {
      return null;
    }

    // the output structure is just whatever structure is coming in
    Instances strucForDatasetCon =
      getStepManager().getIncomingStructureForConnectionType(
        StepManager.CON_DATASET);
    if (strucForDatasetCon != null) {
      return strucForDatasetCon;
    }

    Instances strucForTrainingSetCon =
      getStepManager().getIncomingStructureForConnectionType(
        StepManager.CON_TRAININGSET);
    if (strucForTrainingSetCon != null) {
      return strucForTrainingSetCon;
    }

    return null;
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/TextSaver.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* TextSaver.java
*
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*/
package weka.knowledgeflow.steps;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.Arrays;
import java.util.List;
import weka.core.Defaults;
import weka.core.OptionMetadata;
import weka.core.Settings;
import weka.core.WekaException;
import weka.gui.FilePropertyMetadata;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
/**
* Step for saving textual data to a file.
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(name = "TextSaver", category = "DataSinks",
  toolTipText = "Save text output to a file",
  iconPath = KFGUIConsts.BASE_ICON_PATH + "DefaultText.gif")
public class TextSaver extends BaseStep {

  /** For serialization */
  private static final long serialVersionUID = -1434752243260858338L;

  /** The file to save to (empty => fall back to m_defaultFile) */
  protected File m_file = new File("");

  /** Whether to append to the file or not */
  protected boolean m_append = true;

  /** Whether to write the title string for each textual result too */
  protected boolean m_writeTitleString;

  /** Default location to write to, in case a file has not been explicitly set;
   * resolved from the step settings in stepInit() */
  protected String m_defaultFile = "";
/**
 * Set the file that textual results will be written to.
 *
 * @param f the destination file
 */
@OptionMetadata(displayName = "File to save to",
  description = "The file to save textual results to", displayOrder = 1)
@FilePropertyMetadata(fileChooserDialogType = KFGUIConsts.OPEN_DIALOG,
  directoriesOnly = false)
public void setFile(File f) {
  m_file = f;
}
/**
 * Get the file that textual results will be written to.
 *
 * @return the destination file
 */
public File getFile() {
  return m_file;
}
/**
 * Set whether the file should be appended to rather than overwritten
 *
 * @param append true to append
 */
@OptionMetadata(displayName = "Append to file",
  description = "Append to file, rather than re-create for each incoming "
    + "textual result", displayOrder = 2)
public void setAppend(boolean append) {
  m_append = append;
}
/**
 * Get whether incoming results are appended to the file rather than
 * overwriting it.
 *
 * @return true if the file will be appended to
 */
public boolean getAppend() {
  return m_append;
}
/**
 * Set whether each result's title string should be written to the file ahead
 * of the result itself.
 *
 * @param w true to write the title string
 */
@OptionMetadata(displayName = "Write title string",
  description = "Whether to output the title string associated "
    + "with each textual result", displayOrder = 3)
public void setWriteTitleString(boolean w) {
  m_writeTitleString = w;
}
/**
 * Get whether each result's title string will be written to the file.
 *
 * @return true if the title string will be written
 */
public boolean getWriteTitleString() {
  return m_writeTitleString;
}
/**
 * Initialize the step: capture the user-configured file name and, when none
 * has been set, resolve the fallback file from the step's default settings.
 *
 * @throws WekaException if a problem occurs
 */
@Override
public void stepInit() throws WekaException {
  m_defaultFile = getFile().toString();

  if (m_defaultFile == null || m_defaultFile.length() == 0) {
    File fallback =
      getStepManager().getSettings().getSetting(TextSaverDefaults.ID,
        TextSaverDefaults.DEFAULT_FILE_KEY, TextSaverDefaults.DEFAULT_FILE,
        getStepManager().getExecutionEnvironment().getEnvironmentVariables());
    m_defaultFile = fallback.toString();
  }
}
/**
 * Get a list of incoming connection types that this step can accept. Any
 * number of incoming "text" connections may be attached.
 *
 * @return a list of incoming connections that this step can accept given its
 *         current state
 */
@Override
public List<String> getIncomingConnectionTypes() {
  return Arrays.asList(StepManager.CON_TEXT);
}
/**
 * Get a list of outgoing connection types that this step can produce. This
 * step is a data sink, so it produces no outgoing connections.
 *
 * @return null - no outgoing connections are produced
 */
@Override
public List<String> getOutgoingConnectionTypes() {
  return null;
}
/**
 * Process an incoming textual result: resolve the destination file name
 * (falling back to the default and forcing a ".txt" extension), then write
 * the (optionally titled) content using UTF-8. Synchronized because multiple
 * upstream text connections may deliver concurrently.
 *
 * @param data the data to process; primary payload is the text content, with
 *          an optional title under CON_AUX_DATA_TEXT_TITLE
 * @throws WekaException if writing the file fails
 */
@Override
public synchronized void processIncoming(Data data) throws WekaException {
  getStepManager().processing();
  String content = data.getPrimaryPayload();
  String title = data.getPayloadElement(StepManager.CON_AUX_DATA_TEXT_TITLE);
  // fall back to the default resolved in stepInit() when the user has not
  // explicitly configured a file
  String fileName = getFile().toString();
  if (fileName == null || fileName.length() == 0) {
    fileName = m_defaultFile;
  }
  // expand any environment variables in the file name and title
  fileName = environmentSubstitute(fileName);
  if (title != null && title.length() > 0) {
    title = environmentSubstitute(title);
  } else {
    // normalize empty titles to null so downstream checks are simple
    title = null;
  }
  if (!(new File(fileName)).isDirectory()) {
    if (!fileName.toLowerCase().endsWith(".txt")) {
      fileName += ".txt";
    }
    File file = new File(fileName);
    getStepManager().logDetailed(
      "Writing " + (title != null ? title : "file to " + file.toString()));
    Writer writer = null;
    try {
      // m_append controls whether the file is appended to or re-created
      writer =
        new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file,
          m_append), "utf-8"));
      if (title != null && getWriteTitleString()) {
        writer.write(title + "\n\n");
      }
      writer.write(content);
    } catch (IOException e) {
      throw new WekaException(e);
    } finally {
      // NOTE(review): a failure in flush()/close() raises a WekaException
      // that can mask an earlier write exception - confirm acceptable
      if (writer != null) {
        try {
          writer.flush();
          writer.close();
        } catch (IOException e) {
          throw new WekaException(e);
        }
      }
    }
  } else {
    // a directory was supplied - skip writing but do not abort the flow
    getStepManager().logWarning(
      "Supplied file is a directory! Unable to write.");
  }
  if (!isStopRequested()) {
    getStepManager().finished();
  } else {
    getStepManager().interrupted();
  }
}
/**
* Get default settings for the step (if any). Returning null indicates that
* the step has no user-editable defaults.
*
* @return the default settings
*/
@Override
public Defaults getDefaultSettings() {
return new TextSaverDefaults();
}
/**
* Defaults for the {@TextSaver} step
*/
public static final class TextSaverDefaults extends Defaults {
public static final String ID = "weka.knowledgeflow.steps.textsaver";
public static final Settings.SettingKey DEFAULT_FILE_KEY =
new Settings.SettingKey(ID + ".defaultFile", "Default file to save to",
"Save to this file if the user has "
+ "not explicitly set one in the step");
public static final File DEFAULT_FILE = new File("${user.dir}/textout.txt");
private static final long serialVersionUID = -2739579935119189195L;
/**
* Constructor
*/
public TextSaverDefaults() {
super(ID);
m_defaults.put(DEFAULT_FILE_KEY, DEFAULT_FILE);
}
}
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/TextViewer.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* TextViewer.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Instances;
import weka.core.WekaException;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
 * A step for collecting and viewing textual data
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision: $
 */
@KFStep(name = "TextViewer", category = "Visualization",
  toolTipText = "View textual output", iconPath = KFGUIConsts.BASE_ICON_PATH
    + "DefaultText.gif")
public class TextViewer extends BaseStep implements DataCollector {

  private static final long serialVersionUID = 8602416209256135064L;

  /** Holds textual results, keyed by a timestamped name */
  protected Map<String, String> m_results = new LinkedHashMap<String, String>();

  /**
   * The interactive popup viewer registers to receive updates when new textual
   * results arrive
   */
  protected transient TextNotificationListener m_viewerListener;

  /**
   * Initialize the step
   */
  @Override
  public void stepInit() {
    // nothing to do
  }

  /**
   * Get a list of incoming connection types that this step can accept.
   *
   * @return a list of incoming connections that this step can accept given its
   *         current state
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    return Arrays.asList(StepManager.CON_TEXT, StepManager.CON_DATASET,
      StepManager.CON_TRAININGSET, StepManager.CON_TESTSET);
  }

  /**
   * Get a list of outgoing connection types that this step can produce. Text
   * can only be passed on once at least one incoming connection is present.
   *
   * @return a list of outgoing connections that this step can produce
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    return getStepManager().numIncomingConnections() > 0 ? Arrays
      .asList(StepManager.CON_TEXT) : new ArrayList<String>();
  }

  /**
   * Process an incoming data payload. Stores the textual content under a
   * timestamped key (sleeping briefly to regenerate the timestamp if the key
   * collides), notifies any registered interactive viewer, and passes the
   * text downstream.
   *
   * @param data the data to process
   * @throws WekaException if a problem occurs
   */
  @Override
  public synchronized void processIncoming(Data data) throws WekaException {
    getStepManager().processing();
    String title = data.getPayloadElement(StepManager.CON_AUX_DATA_TEXT_TITLE);
    if (title == null
      && (data.getConnectionName().equals(StepManager.CON_DATASET)
        || data.getConnectionName().equals(StepManager.CON_TRAININGSET) || data
        .getConnectionName().equals(StepManager.CON_TESTSET))) {
      // fall back to the relation name for instance-based connections
      title = ((Instances) data.getPrimaryPayload()).relationName();
    }
    if (title != null) {
      getStepManager().logDetailed("Storing result: " + title);
    }
    // guard against a missing payload element rather than risking a
    // NullPointerException on toString()
    Object payload = data.getPayloadElement(data.getConnectionName());
    String body = payload != null ? payload.toString() : null;
    Integer setNum =
      (Integer) data.getPayloadElement(StepManager.CON_AUX_DATA_SET_NUM);
    if (title != null && body != null) {
      String name =
        (new SimpleDateFormat("HH:mm:ss.SSS - ")).format(new Date());
      name = name + title + (setNum != null ? " (" + setNum + ")" : "");
      if (m_results.containsKey(name)) {
        // key collision: sleep briefly so a fresh timestamp yields a
        // unique key
        try {
          Thread.sleep(5);
          name =
            (new SimpleDateFormat("HH:mm:ss.SSS - ")).format(new Date());
          name = name + title + (setNum != null ? " (" + setNum + ")" : "");
        } catch (InterruptedException e) {
          // restore the interrupt status instead of silently swallowing it
          Thread.currentThread().interrupt();
        }
      }
      m_results.put(name, body);
      if (m_viewerListener != null) {
        // pass the already-constructed key; previously the title and set
        // number were (incorrectly) appended to it a second time here
        m_viewerListener.acceptTextResult(name, body);
      }
    }
    Data textData = new Data(StepManager.CON_TEXT, body);
    textData.setPayloadElement(StepManager.CON_AUX_DATA_TEXT_TITLE, title);
    // pass on downstream
    getStepManager().outputData(textData);
    getStepManager().finished();
  }

  /**
   * When running in a graphical execution environment a step can make one or
   * more popup Viewer components available. The map returned by this method is
   * keyed by action name (e.g. "Show results"), and values are fully qualified
   * names of the corresponding StepInteractiveView implementation. The "Show
   * results" viewer is only offered when no interactive viewer is already
   * registered.
   *
   * @return a map of viewer component names, or null if this step has no
   *         graphical views
   */
  @Override
  public Map<String, String> getInteractiveViewers() {
    Map<String, String> views = new LinkedHashMap<String, String>();
    if (m_viewerListener == null) {
      views.put("Show results",
        "weka.gui.knowledgeflow.steps.TextViewerInteractiveView");
    }
    return views;
  }

  /**
   * Get the textual results stored in this step
   *
   * @return a map of results
   */
  public synchronized Map<String, String> getResults() {
    return m_results;
  }

  /**
   * Get the results stored in this step. Calls {@code getResults()}
   *
   * @return the results (a map of named textual results) as an Object
   */
  @Override
  public Object retrieveData() {
    return getResults();
  }

  /**
   * Restore/set the data in this step
   *
   * @param data the data to set (is expected to be a map of Strings)
   * @throws IllegalArgumentException if the argument is not a Map
   */
  @SuppressWarnings("unchecked")
  @Override
  public void restoreData(Object data) {
    if (!(data instanceof Map)) {
      throw new IllegalArgumentException("Argument must be a Map");
    }
    m_results = (Map<String, String>) data;
  }

  /**
   * Set the listener to be notified about new textual results
   *
   * @param l the listener to receive notifications
   */
  public void setTextNotificationListener(TextNotificationListener l) {
    m_viewerListener = l;
  }

  /**
   * Remove the listener for textual results (only if it is the currently
   * registered listener)
   *
   * @param l the listener to remove
   */
  public void removeTextNotificationListener(TextNotificationListener l) {
    if (l == m_viewerListener) {
      m_viewerListener = null;
    }
  }

  /**
   * Interface for listeners of textual results
   */
  public static interface TextNotificationListener {

    /**
     * Accept a new textual result
     *
     * @param name the name of the result
     * @param text the text of the result
     */
    void acceptTextResult(String name, String text);
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/TrainTestSplitMaker.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* TrainTestSplitMaker.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import weka.core.Instances;
import weka.core.OptionMetadata;
import weka.core.WekaException;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
/**
 * A step that creates a random train/test split from an incoming data set.
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision: $
 */
@KFStep(name = "TrainTestSplitMaker", category = "Evaluation",
  toolTipText = "A step that randomly splits incoming data into a training and test set",
  iconPath = KFGUIConsts.BASE_ICON_PATH + "TrainTestSplitMaker.gif")
public class TrainTestSplitMaker extends BaseStep {

  private static final long serialVersionUID = 7685026723199727685L;

  /** Default split percentage (may contain environment variables) */
  protected String m_trainPercentageS = "66";

  /** Default seed for the random number generator (may contain env vars) */
  protected String m_seedS = "1";

  /** Resolved percentage */
  protected double m_trainPercentage = 66.0;

  /**
   * Whether to preserve the order of the data before making the split, rather
   * than randomly shuffling
   */
  protected boolean m_preserveOrder;

  /** Resolved seed */
  protected long m_seed = 1L;

  /**
   * Set the training percentage
   *
   * @param percent the training percentage
   */
  @OptionMetadata(displayName = "Training percentage",
    description = "The percentage of data to go into the training set",
    displayOrder = 1)
  public void setTrainPercent(final String percent) {
    this.m_trainPercentageS = percent;
  }

  /**
   * Get the training percentage
   *
   * @return the training percentage
   */
  public String getTrainPercent() {
    return this.m_trainPercentageS;
  }

  /**
   * Set the random seed to use
   *
   * @param seed the random seed to use
   */
  @OptionMetadata(displayName = "Random seed",
    description = "The random seed to use when shuffling the data",
    displayOrder = 2)
  public void setSeed(final String seed) {
    this.m_seedS = seed;
  }

  /**
   * Get the random seed to use
   *
   * @return the random seed to use
   */
  public String getSeed() {
    return this.m_seedS;
  }

  /**
   * Set whether to preserve the order of the instances or not
   *
   * @param preserve true to preserve the order rather than randomly shuffling
   *          first
   */
  @OptionMetadata(displayName = "Preserve instance order",
    description = "Preserve the order of the instances rather than randomly shuffling",
    displayOrder = 3)
  public void setPreserveOrder(final boolean preserve) {
    this.m_preserveOrder = preserve;
  }

  /**
   * Get whether to preserve the order of the instances or not
   *
   * @return true to preserve the order rather than randomly shuffling first
   */
  public boolean getPreserveOrder() {
    return this.m_preserveOrder;
  }

  /**
   * Initialize the step: resolves environment variables in the seed and
   * training percentage settings. Unparseable values are logged as warnings
   * and the previously resolved values are retained.
   *
   * @throws WekaException if a problem occurs
   */
  @Override
  public void stepInit() throws WekaException {
    String seed = this.getStepManager().environmentSubstitute(this.getSeed());
    try {
      this.m_seed = Long.parseLong(seed);
    } catch (NumberFormatException ex) {
      this.getStepManager().logWarning("Unable to parse seed value: " + seed);
    }
    String tP = this.getStepManager().environmentSubstitute(this.getTrainPercent());
    try {
      this.m_trainPercentage = Double.parseDouble(tP);
    } catch (NumberFormatException ex) {
      this.getStepManager().logWarning("Unable to parse train percentage value: " + tP);
    }
  }

  /**
   * Process an incoming data payload: optionally shuffles the data, then
   * splits it into training and test sets according to the configured
   * percentage and outputs both.
   *
   * @param data the data to process
   * @throws WekaException if a problem occurs
   */
  @Override
  public void processIncoming(final Data data) throws WekaException {
    this.getStepManager().processing();
    String incomingConnName = data.getConnectionName();
    Instances dataSet = (Instances) data.getPayloadElement(incomingConnName);
    if (dataSet == null) {
      throw new WekaException("Incoming instances should not be null!");
    }
    this.getStepManager().logBasic("Creating train/test split");
    this.getStepManager().statusMessage("Creating train/test split");
    if (!this.getPreserveOrder()) {
      try {
        // this fork's randomize() is interruptible
        dataSet.randomize(new Random(this.m_seed));
      } catch (InterruptedException e) {
        // restore the interrupt status before converting to unchecked so
        // that callers can still observe the interruption
        Thread.currentThread().interrupt();
        throw new IllegalStateException(e);
      }
    }
    int trainSize = (int) Math.round(dataSet.numInstances() * this.m_trainPercentage / 100);
    int testSize = dataSet.numInstances() - trainSize;
    Instances train = new Instances(dataSet, 0, trainSize);
    Instances test = new Instances(dataSet, trainSize, testSize);
    Data trainData = new Data(StepManager.CON_TRAININGSET);
    trainData.setPayloadElement(StepManager.CON_TRAININGSET, train);
    trainData.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, 1);
    trainData.setPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM, 1);
    Data testData = new Data(StepManager.CON_TESTSET);
    testData.setPayloadElement(StepManager.CON_TESTSET, test);
    testData.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, 1);
    testData.setPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM, 1);
    if (!this.isStopRequested()) {
      this.getStepManager().outputData(trainData, testData);
    }
    this.getStepManager().finished();
  }

  /**
   * Get a list of incoming connection types that this step can accept. Only
   * one incoming batch connection is accepted at a time.
   *
   * @return a list of incoming connections that this step can accept given its
   *         current state
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    if (this.getStepManager().numIncomingConnections() > 0) {
      return new ArrayList<String>();
    }
    return Arrays.asList(StepManager.CON_DATASET, StepManager.CON_TRAININGSET, StepManager.CON_TESTSET);
  }

  /**
   * Get a list of outgoing connection types that this step can produce.
   * Training and test set connections are only available once an incoming
   * connection is present.
   *
   * @return a list of outgoing connections that this step can produce
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    return this.getStepManager().numIncomingConnections() > 0 ? Arrays.asList(StepManager.CON_TRAININGSET, StepManager.CON_TESTSET) : new ArrayList<String>();
  }

  /**
   * If possible, get the output structure for the named connection type as a
   * header-only set of instances. Can return null if the specified connection
   * type is not representable as Instances or cannot be determined at present.
   *
   * @param connectionName the name of the connection type to get the output
   *          structure for
   * @return the output structure as a header-only Instances object
   * @throws WekaException if a problem occurs
   */
  @Override
  public Instances outputStructureForConnectionType(final String connectionName) throws WekaException {
    // we produce training and testset connections
    if ((!connectionName.equals(StepManager.CON_TRAININGSET) && !connectionName.equals(StepManager.CON_TESTSET)) || this.getStepManager().numIncomingConnections() == 0) {
      return null;
    }
    // our output structure is the same as whatever kind of input we are getting
    Instances strucForDatasetCon = this.getStepManager().getIncomingStructureForConnectionType(StepManager.CON_DATASET);
    if (strucForDatasetCon != null) {
      return strucForDatasetCon;
    }
    Instances strucForTestsetCon = this.getStepManager().getIncomingStructureForConnectionType(StepManager.CON_TESTSET);
    if (strucForTestsetCon != null) {
      return strucForTestsetCon;
    }
    Instances strucForTrainingCon = this.getStepManager().getIncomingStructureForConnectionType(StepManager.CON_TRAININGSET);
    if (strucForTrainingCon != null) {
      return strucForTrainingCon;
    }
    return null;
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/TrainingSetMaker.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* TrainingSetMaker.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Instances;
import weka.core.WekaException;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.StepManager;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * Step that converts an incoming dataSet or testSet into a trainingSet.
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision: $
 */
@KFStep(name = "TrainingSetMaker", category = "Evaluation",
  toolTipText = "Make an incoming dataSet or testSet into a trainingSet",
  iconPath = KFGUIConsts.BASE_ICON_PATH + "TrainingSetMaker.gif")
public class TrainingSetMaker extends BaseStep {

  private static final long serialVersionUID = 1082946912813721183L;

  /**
   * Initialize the step
   */
  @Override
  public void stepInit() {
    // no initialization required for this step
  }

  /**
   * Process an incoming data payload: re-badges the incoming instances as a
   * trainingSet and passes them on.
   *
   * @param data the data to process
   * @throws WekaException if the incoming instances are missing
   */
  @Override
  public void processIncoming(Data data) throws WekaException {
    getStepManager().processing();
    Instances incoming =
      (Instances) data.getPayloadElement(data.getConnectionName());
    if (incoming == null) {
      throw new WekaException("Incoming instances should not be null!");
    }
    getStepManager().logBasic(
      "Creating a training set for relation " + incoming.relationName());
    Data trainingData = new Data();
    trainingData.setPayloadElement(StepManager.CON_TRAININGSET, incoming);
    trainingData.setPayloadElement(StepManager.CON_AUX_DATA_SET_NUM, 1);
    trainingData.setPayloadElement(StepManager.CON_AUX_DATA_MAX_SET_NUM, 1);
    if (!isStopRequested()) {
      getStepManager().outputData(StepManager.CON_TRAININGSET, trainingData);
    }
    getStepManager().finished();
  }

  /**
   * Get a list of incoming connection types that this step can accept. A
   * single incoming dataSet or testSet connection is accepted.
   *
   * @return a list of incoming connections that this step can accept given its
   *         current state
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    return getStepManager().numIncomingConnections() == 0 ? Arrays.asList(
      StepManager.CON_DATASET, StepManager.CON_TESTSET)
      : new ArrayList<String>();
  }

  /**
   * Get a list of outgoing connection types that this step can produce. A
   * trainingSet is only produced once an incoming connection is present.
   *
   * @return a list of outgoing connections that this step can produce
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    return getStepManager().numIncomingConnections() > 0 ? Arrays
      .asList(StepManager.CON_TRAININGSET) : new ArrayList<String>();
  }

  /**
   * If possible, get the output structure for the named connection type as a
   * header-only set of instances. The output structure matches whichever
   * incoming connection (dataSet or testSet) is present.
   *
   * @param connectionName the name of the connection type to get the output
   *          structure for
   * @return the output structure as a header-only Instances object, or null
   * @throws WekaException if a problem occurs
   */
  @Override
  public Instances outputStructureForConnectionType(String connectionName)
    throws WekaException {
    if (!connectionName.equals(StepManager.CON_TRAININGSET)
      || getStepManager().numIncomingConnections() == 0) {
      return null;
    }
    Instances datasetStruct =
      getStepManager().getIncomingStructureForConnectionType(
        StepManager.CON_DATASET);
    if (datasetStruct != null) {
      return datasetStruct;
    }
    Instances testsetStruct =
      getStepManager().getIncomingStructureForConnectionType(
        StepManager.CON_TESTSET);
    return testsetStruct;
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/WekaAlgorithmWrapper.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* WekaAlgorithmWrapper.java
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Utils;
import weka.gui.ProgrammaticProperty;
import weka.gui.knowledgeflow.StepVisual;
import java.io.Serializable;
import java.lang.annotation.Annotation;
/**
 * A step that wraps a class of standard Weka algorithm (e.g. filter,
 * classifier, clusterer etc.)
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision: $
 */
public abstract class WekaAlgorithmWrapper extends BaseStep implements
  Serializable {

  private static final long serialVersionUID = -1013404060247467085L;

  /** Icon path to the specific icon for the wrapped algorithm */
  protected String m_iconPath;

  /**
   * Icon path to the default icon at the package level - e.g.
   * weka.classifiers.rules
   */
  protected String m_defaultPackageIconPath;

  /**
   * Icon path to the default icon for the type of wrapped algorithm - e.g.
   * Classifier, Loader etc. (expected to be set by subclasses)
   */
  protected String m_defaultIconPath;

  /** The wrapped algorithm */
  protected Object m_wrappedAlgorithm;

  /**
   * Get global "help" info. Returns the global info of the wrapped algorithm
   *
   * @return global "help" info
   */
  @Override
  public String globalInfo() {
    if (getWrappedAlgorithm() != null) {
      return Utils.getGlobalInfo(getWrappedAlgorithm(), false);
    }
    return super.globalInfo();
  }

  /**
   * Get the wrapped algorithm
   *
   * @return the wrapped algorithm
   */
  @NotPersistable
  @ProgrammaticProperty
  public Object getWrappedAlgorithm() {
    return m_wrappedAlgorithm;
  }

  /**
   * Set the wrapped algorithm. Derives the step name and the icon paths from
   * the algorithm's class name.
   *
   * @param algo the algorithm to wrap
   */
  public void setWrappedAlgorithm(Object algo) {
    m_wrappedAlgorithm = algo;
    String className = algo.getClass().getCanonicalName();
    if (className == null) {
      // anonymous and local classes have no canonical name; fall back to
      // the binary name rather than throwing a NullPointerException
      className = algo.getClass().getName();
    }
    // guard against default-package classes: lastIndexOf would return -1 and
    // the original substring(0, -1) call would throw
    int lastDot = className.lastIndexOf('.');
    String name = lastDot >= 0 ? className.substring(lastDot + 1) : className;
    String packageName = lastDot >= 0 ? className.substring(0, lastDot) : "";
    // preserve the existing name if already set (i.e. the name property might
    // get set first by the flow loading process before setWrappedAlgorithm()
    // is invoked
    Annotation stepA = this.getClass().getAnnotation(KFStep.class);
    if (getName() == null || getName().length() == 0
      || (stepA != null && getName().equals(((KFStep) stepA).name()))) {
      setName(name);
    }
    m_defaultPackageIconPath = StepVisual.BASE_ICON_PATH + packageName + ".gif";
    m_iconPath = StepVisual.BASE_ICON_PATH + name + ".gif";
  }

  /**
   * Get the path to the icon for this wrapped algorithm
   *
   * @return the path to the icon
   */
  public String getIconPath() {
    return m_iconPath;
  }

  /**
   * Get the default icon at the package level for this type of wrapped
   * algorithm - e.g. weka.classifiers.meta
   *
   * @return the default icon at the package level
   */
  public String getDefaultPackageLevelIconPath() {
    return m_defaultPackageIconPath;
  }

  /**
   * Get the default icon for this type of wrapped algorithm (i.e. generic
   * Loader, Saver etc.)
   *
   * @return the default icon for this wrapped algorithm
   */
  public String getDefaultIconPath() {
    return m_defaultIconPath;
  }

  /**
   * Get the class of the algorithm being wrapped
   *
   * @return the class of the algorithm being wrapped
   */
  public abstract Class getWrappedAlgorithmClass();
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/WriteDataToResult.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* WriteDataToResult.java
* Copyright (C) 2016 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import weka.core.Environment;
import weka.core.WekaException;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.JobEnvironment;
import weka.knowledgeflow.StepManager;
import java.util.Arrays;
import java.util.List;
/**
 * Step that stores incoming non-incremental data in the job environment
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision: $
 */
@KFStep(
  name = "WriteDataToResult",
  category = "Flow",
  toolTipText = "Write incoming non-incremental data to the results store in the "
    + "job environment", iconPath = KFGUIConsts.BASE_ICON_PATH
    + "WriteDataToResult.gif")
public class WriteDataToResult extends BaseStep {

  private static final long serialVersionUID = -1932252461151862615L;

  /**
   * Initialize the step: ensures the execution environment's variables are
   * held in a JobEnvironment, wrapping the current environment if necessary.
   *
   * @throws WekaException if a problem occurs
   */
  @Override
  public void stepInit() throws WekaException {
    Environment currentEnv =
      getStepManager().getExecutionEnvironment().getEnvironmentVariables();
    if (!(currentEnv instanceof JobEnvironment)) {
      getStepManager().getExecutionEnvironment().setEnvironmentVariables(
        new JobEnvironment(currentEnv));
    }
  }

  /**
   * Process an incoming data payload by adding it to the job environment's
   * result store.
   *
   * @param data the data to store
   * @throws WekaException if a problem occurs
   */
  @Override
  public void processIncoming(Data data) throws WekaException {
    getStepManager().processing();
    JobEnvironment jobEnv =
      (JobEnvironment) getStepManager().getExecutionEnvironment()
        .getEnvironmentVariables();
    getStepManager().logDetailed(
      "Storing " + data.getConnectionName() + " in result");
    jobEnv.addToResult(data);
    getStepManager().finished();
  }

  /**
   * Get a list of incoming connection types that this step can accept - all
   * batch (non-incremental) connection types.
   *
   * @return a list of acceptable incoming connection types
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    return Arrays.asList(StepManager.CON_DATASET, StepManager.CON_TRAININGSET,
      StepManager.CON_TESTSET, StepManager.CON_BATCH_CLASSIFIER,
      StepManager.CON_BATCH_CLUSTERER, StepManager.CON_BATCH_ASSOCIATOR,
      StepManager.CON_TEXT, StepManager.CON_IMAGE);
  }

  /**
   * Get a list of outgoing connection types. This step is a sink and produces
   * none (signalled by returning null).
   *
   * @return null, as no outgoing connections are produced
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    return null;
  }
}
|
0
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow
|
java-sources/ai/libs/thirdparty/interruptible-weka/0.1.6/weka/knowledgeflow/steps/WriteWekaLog.java
|
/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* WriteToWekaLog.java
* Copyright (C) 2016 University of Waikato, Hamilton, New Zealand
*
*/
package weka.knowledgeflow.steps;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import weka.core.OptionMetadata;
import weka.core.WekaException;
import weka.gui.knowledgeflow.KFGUIConsts;
import weka.knowledgeflow.Data;
import weka.knowledgeflow.LoggingLevel;
import weka.knowledgeflow.StepManager;
/**
* Step that takes incoming data and writes it to the Weka log
*
* @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
* @version $Revision: $
*/
@KFStep(name = "WriteToWekaLog", category = "Flow",
  toolTipText = "Write data to the log", iconPath = KFGUIConsts.BASE_ICON_PATH
    + "WriteWekaLog.gif")
public class WriteWekaLog extends BaseStep {

  private static final long serialVersionUID = -2306717547200779711L;

  /**
   * How often (in data points) to write incremental data to the log. May
   * contain environment variables; resolved in {@link #stepInit()}.
   */
  protected String m_incrementalWriteFrequency = "1000";

  /** Resolved frequency; kept >= 1 so the modulo in processStreaming() cannot divide by zero */
  protected int m_incrFreq = 1000;

  /** Count of how many incremental data points have been seen so far */
  protected int m_incrCount;

  /** True if the step has been reset */
  protected boolean m_isReset;

  /** True if the input is incremental */
  protected boolean m_inputIsIncremental;

  /** Level to log at */
  protected LoggingLevel m_logLevel = LoggingLevel.BASIC;

  /**
   * Set the logging level to use
   *
   * @param level the level to use
   */
  @OptionMetadata(displayName = "Logging level", description = "The level at "
    + "which to write log messages", displayOrder = 1)
  public void setLoggingLevel(LoggingLevel level) {
    m_logLevel = level;
  }

  /**
   * Get the logging level to use
   *
   * @return the level to use
   */
  public LoggingLevel getLoggingLevel() {
    return m_logLevel;
  }

  /**
   * Set how frequently to write an incremental data point to the log
   *
   * @param frequency the frequency (in data points) to write to the log
   */
  @OptionMetadata(displayName = "Incremental logging frequency",
    description = "How often to write an incremental/streaming data point "
      + "to the log", displayOrder = 2)
  public void setIncrementalLoggingFrequency(String frequency) {
    m_incrementalWriteFrequency = frequency;
  }

  /**
   * Get how frequently to write an incremental data point to the log
   *
   * @return the frequency (in data points) to write to the log
   */
  public String getIncrementalLoggingFrequency() {
    return m_incrementalWriteFrequency;
  }

  /**
   * Initialize the step. Resolves any environment variables in the
   * incremental logging frequency setting and parses the result.
   *
   * @throws WekaException if a problem occurs
   */
  @Override
  public void stepInit() throws WekaException {
    m_isReset = true;
    m_incrCount = 0;
    m_inputIsIncremental = false;
    String resolvedFreq =
      getStepManager().environmentSubstitute(m_incrementalWriteFrequency);
    if (resolvedFreq.length() > 0) {
      try {
        // BUGFIX: parse the environment-substituted value, not the raw setting.
        // The original parsed m_incrementalWriteFrequency, so a setting such as
        // "${freq}" always failed to parse even when it resolved to a number.
        m_incrFreq = Integer.parseInt(resolvedFreq);
      } catch (NumberFormatException ex) {
        getStepManager().logWarning(
          "Unable to parse incremental write frequency " + "setting "
            + resolvedFreq);
      }
    }
    if (m_incrFreq < 1) {
      // guard against a zero/negative frequency causing an ArithmeticException
      // in processStreaming()'s modulo
      m_incrFreq = 1;
    }
  }

  /**
   * Process an incoming piece of data. Dispatches to streaming or batch
   * handling depending on the first payload seen after a reset.
   *
   * @param data the payload to process
   * @throws WekaException if a problem occurs
   */
  @Override
  public void processIncoming(Data data) throws WekaException {
    if (m_isReset) {
      m_isReset = false;
      m_inputIsIncremental =
        data.getPayloadElement(StepManager.CON_AUX_DATA_IS_INCREMENTAL, false);
    }
    if (m_inputIsIncremental) {
      processStreaming(data);
      if (isStopRequested()) {
        getStepManager().interrupted();
      }
    } else {
      getStepManager().processing();
      processBatch(data);
      if (isStopRequested()) {
        getStepManager().interrupted();
      } else {
        getStepManager().finished();
      }
    }
  }

  /**
   * Process a streaming data point. Only every m_incrFreq-th point is written
   * to the log.
   *
   * @param data the data
   * @throws WekaException if a problem occurs
   */
  protected void processStreaming(Data data) throws WekaException {
    Object payload = data.getPrimaryPayload();
    if (m_incrCount % m_incrFreq == 0 && payload != null) {
      getStepManager().log(payload.toString(), m_logLevel);
    }
    m_incrCount++;
  }

  /**
   * Process a batch data point by writing it straight to the log.
   *
   * @param data the data
   * @throws WekaException if a problem occurs
   */
  protected void processBatch(Data data) throws WekaException {
    Object payload = data.getPrimaryPayload();
    if (payload != null) {
      getStepManager().log(payload.toString(), m_logLevel);
    }
  }

  /**
   * Get a list of acceptable incoming connection types (at this point in time).
   * An instance (streaming) connection is mutually exclusive with the batch
   * connection types.
   *
   * @return a list of legal incoming connection types to accept
   */
  @Override
  public List<String> getIncomingConnectionTypes() {
    List<String> result = new ArrayList<>();
    if (getStepManager().numIncomingConnections() == 0) {
      result.add(StepManager.CON_INSTANCE);
    }
    if (getStepManager().numIncomingConnectionsOfType(
      StepManager.CON_INSTANCE) == 0) {
      result.addAll(Arrays.asList(StepManager.CON_DATASET,
        StepManager.CON_TRAININGSET, StepManager.CON_TESTSET,
        StepManager.CON_TEXT, StepManager.CON_BATCH_ASSOCIATOR,
        StepManager.CON_BATCH_CLASSIFIER, StepManager.CON_BATCH_CLUSTERER));
    }
    return result;
  }

  /**
   * Get currently generatable outgoing connection types — this step passes
   * through whatever connection types it receives.
   *
   * @return a list of outgoing connection types
   */
  @Override
  public List<String> getOutgoingConnectionTypes() {
    Map<String, List<StepManager>> incoming =
      getStepManager().getIncomingConnections();
    return new ArrayList<>(incoming.keySet());
  }
}
|
0
|
java-sources/ai/lilystyle/android-sdk/1.0.8/ai/lilystyle
|
java-sources/ai/lilystyle/android-sdk/1.0.8/ai/lilystyle/analytics_android/Constants.java
|
package ai.lilystyle.analytics_android;
import android.os.Build;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
/**
 * Internal constants used throughout the Lily AI analytics SDK.
 * Not part of the public API.
 */
final class Constants {

  /** Tag used for all SDK log output. */
  final static String LOG_TAG = "Lily_AI";

  // Keys for <meta-data> entries in the host application's AndroidManifest.xml
  final static String META_BASE_URL_KEY = "ai.lily.analytics.endpoint";
  final static String META_API_TOKEN_KEY = "ai.lily.analytics.api_token";
  final static String META_API_KEY_KEY = "ai.lily.analytics.api_key";

  // SharedPreferences file name and the keys stored in it
  final static String PREFS_FILE_NAME = "ai.lily.analytics.preferences";
  final static String UUID_PREF_NAME = "ai.lily.analytics.uuid";
  final static String SESSION_ID_PREF_NAME = "ai.lily.analytics.session_id";
  final static String SESSION_START_PREF_NAME = "ai.lily.analytics.session_start";
  final static String SESSION_LAST_EVENT_PREF_NAME = "ai.lily.analytics.session_last_event";
  final static String SESSION_DURATION = "ai.lily.analytics.session_duration";
  final static String LAST_TRACKED_ID = "ai.lily.analytics.last_tracked_id";

  /** Directory-name template for persisted events; %d is the endpoint URL's hash */
  final static String TRACKED_DATA_STORAGE_DIR = "ai.lily.analytics.storage_data_%d";

  // Worker thread-pool configuration
  final static int THREAD_POOL_SIZE = 3;
  final static String THREAD_NAME = "LilyAI-Worker-%d";

  // HTTP timeouts, in seconds (see RequestsHelper.getOkHttpClient)
  final static int HTTP_CONNECT_TIMEOUT = 10;
  final static int HTTP_READ_TIMEOUT = 10;
  final static int HTTP_WRITE_TIMEOUT = 30;

  /** Session expires after 30 minutes of inactivity by default (milliseconds) */
  final static long DEFAULT_SESSION_DURATION = 30 * 60000L;

  // Headers added to every outgoing request.
  // NOTE(review): this map is mutable and package-visible; presumably only the
  // static initializer below ever writes to it — confirm before widening access.
  final static Map<String, String> defaultHeaders = new HashMap<>();
  static {
    defaultHeaders.put("uagent", String.format(Locale.getDefault(), "ai.lily.analytics_android v1.0 device: (%s, %s, %s), os: %s (SDK: %d)",
      Build.BRAND, Build.MODEL, Build.DEVICE, Build.DISPLAY, Build.VERSION.SDK_INT));
  }

  /** Non-instantiable utility holder. */
  private Constants() {}
}
|
0
|
java-sources/ai/lilystyle/android-sdk/1.0.8/ai/lilystyle
|
java-sources/ai/lilystyle/android-sdk/1.0.8/ai/lilystyle/analytics_android/LilyAi.java
|
package ai.lilystyle.analytics_android;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import org.jetbrains.annotations.NotNull;
import org.json.JSONObject;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import okhttp3.OkHttpClient;
/**
 * Public entry point of the Lily AI analytics SDK.
 *
 * One instance exists per (apiKey + token + baseUrl) combination; instances are
 * obtained via the {@code getInstance} factory methods. Tracked events are
 * persisted to disk by {@link TrackingDataPersistentStorage} and delivered
 * asynchronously by {@link Worker} tasks on a fixed thread pool; failed
 * deliveries (network errors / HTTP 5xx) are re-queued for retry.
 */
public final class LilyAi {

  /** Registry of singletons keyed by apiKey+token+baseUrl (guarded by itself). */
  private final static Map<String, LilyAi> instancesMap = new HashMap<>();

  private final SharedPreferences prefs;
  private final String baseUrl;
  /** Headers attached to every event; concurrent because track() may be called from any thread. */
  private final Map<String, String> headers = new ConcurrentHashMap<>();
  private final TrackingDataPersistentStorage trackingDataStorage;
  /** Monotonically increasing event id, persisted across process restarts. */
  private long lastTrackId;
  private final String token;
  private final String apiKey;
  private final ExecutorService executorService;
  private String uuid;
  private String sessionId;
  private Long sessionStartTime;
  private Long sessionLastEventOccurred;
  private long sessionDuration;
  private final OkHttpClient okHttpClient = RequestsHelper.getOkHttpClient();
  /** Main-looper handler used to deliver LilyAiListener callbacks on the UI thread. */
  private final Handler handler;

  /**
   * Private constructor; use getInstance(). Restores uuid/session state from
   * SharedPreferences and registers for replay of undelivered events.
   */
  private LilyAi(Context context, String baseUrl, String token, String apiKey) {
    this.baseUrl = baseUrl;
    this.token = token;
    this.apiKey = apiKey;
    // Per-instance prefs file keyed by the credential triple's hash so distinct
    // endpoints/credentials don't share uuid/session state
    prefs = context.getApplicationContext().getSharedPreferences(Constants.PREFS_FILE_NAME+(apiKey+token+baseUrl).hashCode(), Context.MODE_PRIVATE);
    executorService = new ThreadPoolExecutor(Constants.THREAD_POOL_SIZE, Constants.THREAD_POOL_SIZE,
      1, TimeUnit.MINUTES, new LinkedBlockingDeque<Runnable>(), new WorkerThreadFactory());
    uuid = Utils.getUUID(prefs, true);
    sessionId = prefs.getString(Constants.SESSION_ID_PREF_NAME, null);
    sessionStartTime = prefs.getLong(Constants.SESSION_START_PREF_NAME, 0);
    sessionLastEventOccurred = prefs.getLong(Constants.SESSION_LAST_EVENT_PREF_NAME, 0);
    sessionDuration = prefs.getLong(Constants.SESSION_DURATION, Constants.DEFAULT_SESSION_DURATION);
    lastTrackId = prefs.getLong(Constants.LAST_TRACKED_ID, 0);
    trackingDataStorage = TrackingDataPersistentStorage.getInstance(context.getApplicationContext(),
      String.format(Locale.getDefault(), Constants.TRACKED_DATA_STORAGE_DIR, baseUrl.hashCode()),
      new TrackingDataPersistentStorageListener() {
        // Called by the storage thread with events restored from disk that were
        // never delivered; re-submit them unless a send is already in flight.
        @Override
        public void withUndeliveredData(TrackingData trackingData) {
          synchronized (trackingDataStorage) {
            if (!trackingDataStorage.isSending(trackingData)) {
              trackingDataStorage.put(trackingData, true);
              executorService.submit(new Worker(LilyAi.this.baseUrl, trackingData, true, okHttpClient, workerListener, null));
            }
          }
        }
      });
    handler = new Handler(Looper.getMainLooper());
  }

  /** Obtains an instance configured entirely from manifest meta-data. */
  public static LilyAi getInstance(Context context) {
    return getInstance(context, null, null, null);
  }

  /** Obtains an instance with an explicit token; other settings from manifest meta-data. */
  public static LilyAi getInstance(Context context, String token) {
    return getInstance(context, null, token, null);
  }

  /**
   * Obtains (or creates) the instance for the given configuration. Any null
   * argument is resolved from manifest meta-data; returns null (after logging)
   * when a required setting cannot be resolved.
   */
  public static LilyAi getInstance(Context context, String baseUrl, String token, String apiKey) {
    if (context == null) {
      Log.e(Constants.LOG_TAG, "Context can't be null.");
      return null;
    }
    if (baseUrl == null) {
      baseUrl = Utils.getMetadataString(context, Constants.META_BASE_URL_KEY);
      if (baseUrl == null) {
        Log.e(Constants.LOG_TAG, "Application meta-data " + Constants.META_BASE_URL_KEY +
          " is not set. Set it or provide it with getInstance() method.");
        return null;
      }
    }
    if (token == null) {
      token = Utils.getMetadataString(context, Constants.META_API_TOKEN_KEY);
      if (token == null) {
        Log.e(Constants.LOG_TAG, "Application meta-data " + Constants.META_API_TOKEN_KEY +
          " is not set. Set it or provide it with getInstance() method.");
        return null;
      }
    }
    if (apiKey == null) {
      apiKey = Utils.getMetadataString(context, Constants.META_API_KEY_KEY);
      if (apiKey == null) {
        Log.e(Constants.LOG_TAG, "Application meta-data " + Constants.META_API_KEY_KEY +
          " is not set. Set it or provide it with getInstance() method.");
        return null;
      }
    }
    synchronized (instancesMap) {
      LilyAi instance = instancesMap.get(apiKey+token+baseUrl);
      if (instance == null) {
        instance = new LilyAi(context, baseUrl, token, apiKey);
        instancesMap.put(apiKey+token+baseUrl, instance);
      }
      return instance;
    }
  }

  /** Generates a fresh device UUID and discards the current session. */
  public void resetUUID() {
    uuid = Utils.getUUID(prefs, false);
    sessionId = null;
    prefs.edit().putString(Constants.SESSION_ID_PREF_NAME, null).apply();
  }

  /**
   * Overrides the device UUID. Changing the UUID also invalidates the current
   * session. Empty strings are rejected and logged.
   */
  public void setUUID(@NotNull String uuid) {
    if (uuid.isEmpty()) {
      Log.e(Constants.LOG_TAG, "LilyAI UUID can't be null or empty string! Ignoring setUUID() call with argument "+ uuid);
      return;
    }
    if (!uuid.equals(this.uuid)) {
      sessionId = null;
      prefs.edit().putString(Constants.SESSION_ID_PREF_NAME, null).apply();
    }
    this.uuid = uuid;
    Utils.updateUUID(prefs, uuid);
  }

  /** Returns the current device UUID. */
  public String getUUID() {
    return uuid;
  }

  /** Sets the session inactivity timeout in milliseconds and persists it. */
  public void setSessionDuration(long sessionDuration) {
    this.sessionDuration = sessionDuration;
    prefs.edit().putLong(Constants.SESSION_DURATION, sessionDuration).apply();
  }

  /** Tracks an event without a completion callback. */
  public void track(JSONObject data) {
    track(data, null);
  }

  /**
   * Tracks an event. Rotates the session if the inactivity timeout elapsed,
   * stamps session/auth headers, persists the event, and submits it for
   * asynchronous delivery. The listener (if any) is invoked on the main thread.
   */
  public void track(JSONObject data, final LilyAiListener listener) {
    if (data == null) {
      if (listener != null) {
        handler.post(new Runnable() {
          @Override
          public void run() {
            listener.onError("JSON data is null");
          }
        });
      }
      return;
    }
    // Expire the session when the gap since the last event exceeds the timeout
    if (sessionLastEventOccurred != null && sessionLastEventOccurred > 0 && System.currentTimeMillis() - sessionLastEventOccurred > sessionDuration) {
      sessionId = null;
    }
    if (sessionId == null) {
      sessionId = getUUID() + "-" + System.currentTimeMillis();
      sessionStartTime = System.currentTimeMillis();
      prefs.edit().putString(Constants.SESSION_ID_PREF_NAME, sessionId)
        .putLong(Constants.SESSION_START_PREF_NAME, sessionStartTime)
        .apply();
    }
    addHeader("lsid", sessionId);
    addHeader("lsstart", (sessionStartTime != null && sessionStartTime > 0) ? sessionStartTime.toString() : null);
    addHeader("lsend", String.valueOf(System.currentTimeMillis() + sessionDuration));
    sessionLastEventOccurred = System.currentTimeMillis();
    prefs.edit().putLong(Constants.SESSION_LAST_EVENT_PREF_NAME, sessionLastEventOccurred).apply();
    addHeader("x-api-key", apiKey);
    addHeader("Api-Token", token);
    addHeader("lpid", uuid);
    synchronized (trackingDataStorage) {
      TrackingData newData = new TrackingData(lastTrackId++, headers, data);
      prefs.edit().putLong(Constants.LAST_TRACKED_ID, lastTrackId).apply();
      trackingDataStorage.put(newData, true);
      executorService.submit(new Worker(baseUrl, newData, false, okHttpClient, workerListener, listener));
      // Opportunistically kick off a replay scan for older undelivered events
      trackingDataStorage.getNotDelivered(-1, Constants.THREAD_POOL_SIZE);
    }
  }

  /**
   * Adds (value non-null) or removes (value null) a header sent with every
   * subsequent event. Null names are ignored.
   */
  public void addHeader(String name, String value) {
    if (name != null) {
      if (value == null) {
        headers.remove(name);
      } else {
        headers.put(name, value);
      }
    }
  }

  /** Sets the customer's internal user id header ("uid"). */
  public void setUserID(String uid) {
    addHeader("uid", uid);
  }

  /** Sets the third-party analytics provider id header ("aid"). */
  public void setAnalyticsProviderID(String aid) {
    addHeader("aid", aid);
  }

  /** Sets the third-party analytics session id header ("sid"). */
  public void setAnalyticsSessionID(String sid) {
    addHeader("sid", sid);
  }

  /** Sets the hashed user email header ("uem"). */
  public void setHashedUserEmail(String uem) {
    addHeader("uem", uem);
  }

  /**
   * Sets the referrer-info header from the given components; the header is
   * cleared when all components are null or when JSON assembly fails.
   */
  public void setReferer(String src, String mdm, String pgpath, String pgcat, String pgtype) {
    try {
      JSONObject referer = new JSONObject();
      if (src != null) {
        referer.put("src", src);
      }
      if (mdm != null) {
        referer.put("mdm", mdm);
      }
      if (pgpath != null) {
        referer.put("pgpath", pgpath);
      }
      if (pgcat != null) {
        referer.put("pgcat", pgcat);
      }
      if (pgtype != null) {
        referer.put("pgtype", pgtype);
      }
      if (src != null || mdm != null || pgpath != null || pgcat != null || pgtype != null) {
        addHeader("referrer_info", referer.toString());
      } else {
        addHeader("referrer_info", null);
      }
    } catch (Exception e) {
      addHeader("referrer_info", null);
    }
  }

  /** Sets the experiment id header ("expid"). */
  public void setExperimentId(String expid) {
    addHeader("expid", expid);
  }

  /** Sets the user source IP header ("sip"). */
  public void setUserSourceIP(String sip) {
    addHeader("sip", sip);
  }

  /** Sets the visitor id header ("vid"). */
  public void setVisitorId(String vid) {
    addHeader("vid", vid);
  }

  /**
   * Delivery callback shared by all Worker tasks. On success the event is
   * removed from persistent storage; on transient failure (code <= 0, i.e.
   * network error, or HTTP 5xx) it is re-queued for retry, otherwise it is
   * dropped as permanently failed. Retry completions chain the next replay.
   */
  private final WorkerListener workerListener = new WorkerListener() {
    @Override
    public void onSuccess(TrackingData data, boolean isFromRetry, final LilyAiListener lilyAiListener) {
      synchronized (trackingDataStorage) {
        trackingDataStorage.delivered(data);
        if (isFromRetry) {
          trackingDataStorage.getNotDelivered(data.id, 1);
        }
      }
      if (lilyAiListener != null) {
        handler.post(new Runnable() {
          @Override
          public void run() {
            lilyAiListener.onSuccess();
          }
        });
      }
    }

    @Override
    public void onError(TrackingData data, boolean isFromRetry, int code, final String message, final LilyAiListener lilyAiListener) {
      synchronized (trackingDataStorage) {
        if (code <= 0 || (code >= 500 && code < 600)) {
          // retryable: mark as no longer in flight so the replay scan picks it up
          trackingDataStorage.put(data, false);
        } else {
          // 4xx etc.: treat as permanently failed and discard
          trackingDataStorage.delivered(data);
          if (isFromRetry) {
            trackingDataStorage.getNotDelivered(data.id, 1);
          }
        }
      }
      if (lilyAiListener != null) {
        handler.post(new Runnable() {
          @Override
          public void run() {
            lilyAiListener.onError(message);
          }
        });
      }
    }
  };
}
|
0
|
java-sources/ai/lilystyle/android-sdk/1.0.8/ai/lilystyle
|
java-sources/ai/lilystyle/android-sdk/1.0.8/ai/lilystyle/analytics_android/LilyAiListener.java
|
package ai.lilystyle.analytics_android;
/**
 * Callback for the outcome of a single {@code LilyAi.track()} call.
 * Both methods are invoked on the application's main thread.
 */
public interface LilyAiListener {
  /** Called when the event was accepted by the ingest endpoint. */
  void onSuccess();

  /** Called when delivery failed; message may be null. */
  void onError(String message);
}
|
0
|
java-sources/ai/lilystyle/android-sdk/1.0.8/ai/lilystyle
|
java-sources/ai/lilystyle/android-sdk/1.0.8/ai/lilystyle/analytics_android/RequestsHelper.java
|
package ai.lilystyle.analytics_android;
import org.jetbrains.annotations.NotNull;
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import okhttp3.Interceptor;
import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;
/**
 * Internal HTTP helpers: builds the shared OkHttp client and performs the
 * synchronous event POST.
 */
final class RequestsHelper {

  /** Non-instantiable utility holder. */
  private RequestsHelper() {}

  /**
   * Synchronously POSTs one tracking event as JSON to the ingest endpoint,
   * attaching the event's per-request headers.
   *
   * @param baseUrl ingest endpoint URL
   * @param data    event payload and headers
   * @param client  client to execute the call with
   * @return the HTTP response (caller is responsible for closing it)
   * @throws IOException on connection/read failure
   */
  static Response postPixel(String baseUrl, TrackingData data, OkHttpClient client) throws IOException {
    RequestBody body = RequestBody.create(data.data, MediaType.get("application/json; charset=utf-8"));
    Request.Builder requestBuilder = new Request.Builder().url(baseUrl)
      .post(body);
    // Iterate entries directly instead of keySet()+get(): one lookup per header
    for (Map.Entry<String, String> header : data.headers.entrySet()) {
      if (header.getKey() != null && header.getValue() != null) {
        requestBuilder.header(header.getKey(), header.getValue());
      }
    }
    return client.newCall(requestBuilder.build()).execute();
  }

  /**
   * Builds the SDK's OkHttp client with the configured timeouts and an
   * interceptor that stamps the default headers plus a "ts" timestamp onto
   * every request.
   */
  static OkHttpClient getOkHttpClient() {
    OkHttpClient.Builder builder = new OkHttpClient.Builder()
      .connectTimeout(Constants.HTTP_CONNECT_TIMEOUT, TimeUnit.SECONDS)
      .readTimeout(Constants.HTTP_READ_TIMEOUT, TimeUnit.SECONDS)
      .writeTimeout(Constants.HTTP_WRITE_TIMEOUT, TimeUnit.SECONDS)
      .addInterceptor(new Interceptor() {
        @NotNull
        @Override
        public Response intercept(@NotNull Chain chain) throws IOException {
          Request.Builder builder = chain.request().newBuilder();
          // entrySet iteration avoids a second lookup per default header
          for (Map.Entry<String, String> header : Constants.defaultHeaders.entrySet()) {
            if (header.getValue() != null) {
              builder.header(header.getKey(), header.getValue());
            }
          }
          builder.header("ts", String.valueOf(System.currentTimeMillis()));
          return chain.proceed(builder.build());
        }
      });
    //okhttp3.logging.HttpLoggingInterceptor logger = new okhttp3.logging.HttpLoggingInterceptor();
    //logger.setLevel(okhttp3.logging.HttpLoggingInterceptor.Level.BODY);
    //builder.addInterceptor(logger);
    return builder.build();
  }
}
|
0
|
java-sources/ai/lilystyle/android-sdk/1.0.8/ai/lilystyle
|
java-sources/ai/lilystyle/android-sdk/1.0.8/ai/lilystyle/analytics_android/TrackingData.java
|
package ai.lilystyle.analytics_android;
import org.json.JSONObject;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
/**
 * Immutable snapshot of one tracked event: a unique id, the headers captured
 * at track() time, and the JSON payload serialized to a String. Serialized to
 * disk by TrackingDataPersistentStorage; identity is the id alone.
 *
 * NOTE(review): relies on the default computed serialVersionUID — do not add
 * or rename members without considering already-persisted event files.
 */
class TrackingData implements Serializable {

  final long id;
  final Map<String, String> headers;
  final String data;

  TrackingData(long id, Map<String, String> headers, JSONObject data) {
    this.id = id;
    // Defensive copy: the caller's (concurrent) map keeps changing after track()
    this.headers = new HashMap<>(headers);
    this.data = data.toString();
  }

  @Override
  public int hashCode() {
    // Long.hashCode avoids the Long.valueOf boxing of the original
    return Long.hashCode(id);
  }

  @Override
  public boolean equals(Object obj) {
    // Equivalent to the original's "? id == other.id : super.equals(obj)":
    // Object.equals is reference equality, which can never hold for a
    // non-TrackingData argument here, so it always reduced to false.
    return (obj instanceof TrackingData) && id == ((TrackingData) obj).id;
  }
}
|
0
|
java-sources/ai/lilystyle/android-sdk/1.0.8/ai/lilystyle
|
java-sources/ai/lilystyle/android-sdk/1.0.8/ai/lilystyle/analytics_android/TrackingDataPersistentStorage.java
|
package ai.lilystyle.analytics_android;
import android.content.Context;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
/**
 * Background thread that persists tracking events to disk, removes them once
 * delivered, and replays undelivered events to registered listeners after
 * process restarts or failures. One instance exists per storage directory.
 *
 * All mutable collections are guarded by the {@code trackingData} monitor,
 * which is also used as the worker's wait/notify object.
 */
class TrackingDataPersistentStorage extends Thread {

  private final Context context;
  /** ids currently being sent over the network */
  private final Set<Long> sendingData = new HashSet<>();
  /** events waiting to be written to disk (doubles as the shared monitor) */
  private final Set<TrackingData> trackingData = new HashSet<>();
  /** ids delivered successfully whose on-disk files should be removed */
  private final Set<Long> deliveredData = new HashSet<>();
  private final String filesDir;
  /** request flag: worker should scan disk for undelivered events */
  private volatile boolean notifyWithNotDeliveredData = true;
  /** replay cursor: only ids strictly greater than this are restored */
  private volatile long lastDataId = -1;
  /** maximum number of events to restore per scan */
  private volatile int nextDataCount = 2;
  private final Set<TrackingDataPersistentStorageListener> listeners = new HashSet<>();
  private final static Map<String, TrackingDataPersistentStorage> instancesMap = new HashMap<>();

  private TrackingDataPersistentStorage(Context context, String filesDir) {
    this.context = context;
    this.filesDir = filesDir;
    setName(String.format(Locale.getDefault(), Constants.THREAD_NAME, 0));
    start();
  }

  /**
   * Returns the singleton for the given storage directory, creating and
   * starting it on first use, and registers the listener for replayed events.
   */
  public static TrackingDataPersistentStorage getInstance(Context context, String filesDir, TrackingDataPersistentStorageListener listener) {
    TrackingDataPersistentStorage instance;
    synchronized (instancesMap) {
      instance = instancesMap.get(filesDir);
      if (instance == null) {
        instance = new TrackingDataPersistentStorage(context, filesDir);
        instancesMap.put(filesDir, instance);
      }
    }
    instance.addListener(listener);
    return instance;
  }

  private void addListener(TrackingDataPersistentStorageListener listener) {
    synchronized (listeners) {
      listeners.add(listener);
    }
  }

  /**
   * Queues an event for persistence and marks whether a delivery attempt is
   * currently in flight for it.
   */
  void put(TrackingData data, boolean isSending) {
    synchronized (trackingData) {
      trackingData.add(data);
      if (isSending) {
        sendingData.add(data.id);
      } else {
        sendingData.remove(data.id);
      }
      deliveredData.remove(data.id);
      trackingData.notifyAll();
    }
  }

  /** Marks an event as delivered so its on-disk file gets removed. */
  void delivered(TrackingData data) {
    synchronized (trackingData) {
      sendingData.remove(data.id);
      trackingData.remove(data);
      deliveredData.add(data.id);
      trackingData.notifyAll();
    }
  }

  /**
   * Asks the worker to restore up to {@code count} undelivered events with
   * ids greater than {@code lastId} and hand them to the listeners.
   */
  void getNotDelivered(long lastId, int count) {
    synchronized (trackingData) {
      lastDataId = lastId;
      nextDataCount = count > 0 ? count : 1;
      notifyWithNotDeliveredData = true;
      trackingData.notifyAll();
    }
  }

  /** True when a delivery attempt is currently in flight for this event. */
  boolean isSending(TrackingData data) {
    synchronized (trackingData) {
      return sendingData.contains(data.id);
    }
  }

  /**
   * Worker loop: snapshot pending saves/removals, perform the file I/O outside
   * the lock, then clear the processed entries; park on wait() when idle.
   */
  @Override
  public void run() {
    while (true) {
      Set<TrackingData> dataToSave = new HashSet<>();
      Set<Long> dataToRemove = new HashSet<>();
      synchronized (trackingData) {
        // drop queued saves that were already delivered in the meantime
        for (Iterator<TrackingData> i = trackingData.iterator(); i.hasNext();) {
          TrackingData data = i.next();
          if (deliveredData.contains(data.id)) {
            i.remove();
          }
        }
        dataToSave.addAll(trackingData);
        dataToRemove.addAll(deliveredData);
        if (!notifyWithNotDeliveredData && dataToSave.isEmpty() && dataToRemove.isEmpty()) {
          try {
            trackingData.wait();
            continue;
          } catch (InterruptedException e) {
            // restore the interrupt flag before letting the thread die
            Thread.currentThread().interrupt();
            break;
          }
        }
      }
      for (TrackingData data : dataToSave) {
        saveToDisk(data);
      }
      for (Long id : dataToRemove) {
        removeData(id);
      }
      // BUGFIX: the original condition was
      //   !dataToSave.isEmpty() || dataToRemove.isEmpty()
      // (missing negation on the second operand). When only removals were
      // pending, deliveredData was never cleared, so the wait() guard above
      // never triggered and the loop spun forever re-deleting the same files.
      if (!dataToSave.isEmpty() || !dataToRemove.isEmpty()) {
        synchronized (trackingData) {
          trackingData.removeAll(dataToSave);
          deliveredData.removeAll(dataToRemove);
        }
      }
      if (notifyWithNotDeliveredData) {
        Set<Long> nowSending = new HashSet<>();
        synchronized (trackingData) {
          notifyWithNotDeliveredData = false;
          nowSending.addAll(sendingData);
        }
        Set<TrackingData> undeliveredData = restoreFromDisk(lastDataId, nextDataCount, nowSending);
        synchronized (listeners) {
          for (TrackingData data : undeliveredData) {
            for (TrackingDataPersistentStorageListener listener : listeners) {
              listener.withUndeliveredData(data);
            }
          }
        }
      }
    }
  }

  /**
   * Scans the storage directory (file names are event ids) and restores, in id
   * order, up to nextDataCount events above lastDataId that are not in flight.
   */
  @SuppressWarnings("unchecked")
  private Set<TrackingData> restoreFromDisk(long lastDataId, int nextDataCount, Set<Long> nowSending) {
    Set<TrackingData> undeliveredData = new HashSet<>();
    String[] fileNames = new File(context.getFilesDir(), filesDir).list();
    List<Long> ids = new ArrayList<>();
    if (fileNames != null) {
      for (String name : fileNames) {
        long id;
        try {
          id = Long.parseLong(name);
        } catch (Exception e) {
          // ignore files that are not named after an event id
          continue;
        }
        ids.add(id);
      }
    }
    Collections.sort(ids);
    for (Long id : ids) {
      if (id > lastDataId && !nowSending.contains(id)) {
        TrackingData data = restoreFromDiskOrDelete(id);
        if (data != null) {
          undeliveredData.add(data);
          nextDataCount--;
        }
      }
      if (nextDataCount <= 0) {
        break;
      }
    }
    return undeliveredData;
  }

  /** Deserializes one event file; deletes it when unreadable/corrupt. */
  private TrackingData restoreFromDiskOrDelete(long id) {
    File file = new File (new File(context.getFilesDir(), filesDir), String.valueOf(id));
    if (file.isFile()) {
      try (FileInputStream is = new FileInputStream(file); ObjectInputStream ois = new ObjectInputStream(is)) {
        return (TrackingData) ois.readObject();
      } catch (Exception e) {
        file.delete();
      }
    }
    return null;
  }

  /** Serializes one event to its id-named file; existing files are left as-is. */
  private void saveToDisk(TrackingData data) {
    File file = new File (new File(context.getFilesDir(), filesDir), String.valueOf(data.id));
    if (!file.exists()) {
      File parent = file.getParentFile();
      if (parent != null) {
        file.getParentFile().mkdirs();
      }
      try (FileOutputStream os = new FileOutputStream(file); ObjectOutputStream oos = new ObjectOutputStream(os)) {
        oos.writeObject(data);
      } catch (Exception ignored) {}
    }
  }

  /** Removes a delivered event's file from disk. */
  private void removeData(long id) {
    File file = new File (new File(context.getFilesDir(), filesDir), String.valueOf(id));
    file.delete();
  }
}
|
0
|
java-sources/ai/lilystyle/android-sdk/1.0.8/ai/lilystyle
|
java-sources/ai/lilystyle/android-sdk/1.0.8/ai/lilystyle/analytics_android/TrackingDataPersistentStorageListener.java
|
package ai.lilystyle.analytics_android;
/**
 * Callback invoked by TrackingDataPersistentStorage's worker thread for each
 * undelivered event restored from disk during a replay scan.
 */
interface TrackingDataPersistentStorageListener {
  /** Called (on the storage thread) with one undelivered event to re-send. */
  void withUndeliveredData(TrackingData data);
}
|
0
|
java-sources/ai/lilystyle/android-sdk/1.0.8/ai/lilystyle
|
java-sources/ai/lilystyle/android-sdk/1.0.8/ai/lilystyle/analytics_android/Utils.java
|
package ai.lilystyle.analytics_android;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.os.Build;
import android.os.Bundle;
import android.os.SystemClock;
import java.util.UUID;
final class Utils {
private Utils() {}
static String getMetadataString(Context context, String key) {
if (context == null) return null;
context = context.getApplicationContext();
try {
ApplicationInfo app = context.getPackageManager().getApplicationInfo(context.getPackageName(), PackageManager.GET_META_DATA);
Bundle bundle = app.metaData;
return bundle.getString(key);
} catch (PackageManager.NameNotFoundException ignored) {}
return null;
}
static String getUUID(SharedPreferences prefs, boolean tryRestore) {
String uuid = "";
try {
uuid = prefs.getString(Constants.UUID_PREF_NAME, uuid);
} catch (Exception ignored) {}
if (tryRestore && !uuid.isEmpty()) {
return uuid;
}
uuid = generateUUID();
updateUUID(prefs, uuid);
return uuid;
}
private static String generateUUID() {
UUID randomUUID = UUID.randomUUID();
long msb = randomUUID.getMostSignificantBits() ^ System.currentTimeMillis() ^ ((long) (Build.BRAND+Build.MODEL).hashCode());
long lsb = randomUUID.getLeastSignificantBits() ^ SystemClock.uptimeMillis() ^ ((long) (Build.DISPLAY+Build.DEVICE).hashCode());
return new UUID(msb, lsb).toString();
}
static void updateUUID(SharedPreferences prefs, String uuid) {
prefs.edit().putString(Constants.UUID_PREF_NAME, uuid).apply();
}
}
|
0
|
java-sources/ai/lilystyle/android-sdk/1.0.8/ai/lilystyle
|
java-sources/ai/lilystyle/android-sdk/1.0.8/ai/lilystyle/analytics_android/Worker.java
|
package ai.lilystyle.analytics_android;
import org.json.JSONObject;
import okhttp3.OkHttpClient;
import okhttp3.Response;
import okhttp3.ResponseBody;
/**
 * Runnable that performs a single HTTP delivery attempt for one tracking
 * event and reports the outcome to a {@link WorkerListener}.
 */
class Worker implements Runnable {

  private final OkHttpClient okHttpClient;
  private final String baseUrl;
  /** the event to deliver */
  private final TrackingData data;
  /** internal delivery-outcome callback (required) */
  private final WorkerListener listener;
  /** optional end-user callback, forwarded to the listener (may be null) */
  private final LilyAiListener lilyAiListener;
  /** true when this attempt is a replay of a previously failed event */
  private final boolean isFromRetry;

  Worker(String baseUrl, TrackingData data, boolean isFromRetry, OkHttpClient okHttpClient, WorkerListener listener, LilyAiListener lilyAiListener) {
    this.okHttpClient = okHttpClient;
    this.baseUrl = baseUrl;
    this.data = data;
    this.listener = listener;
    this.lilyAiListener = lilyAiListener;
    this.isFromRetry = isFromRetry;
  }

  @Override
  public void run() {
    // try-with-resources closes the OkHttp response (and its body) on all paths
    try (Response response = RequestsHelper.postPixel(baseUrl, data, okHttpClient)) {
      ResponseBody body = response.body();
      if (body == null) {
        listener.onError(data, isFromRetry, response.code(), null, lilyAiListener);
      } else {
        JSONObject responseData = new JSONObject(body.string());
        if (responseData.has("success")) {
          if (responseData.getBoolean("success")) {
            listener.onSuccess(data, isFromRetry, lilyAiListener);
            return;
          }
        }
        // NOTE(review): if the response lacks a "message" field, getString()
        // throws and the outer catch reports code 0 (retryable) instead of the
        // actual HTTP code — presumably intentional, but worth confirming.
        listener.onError(data, isFromRetry, response.code(), responseData.getString("message"), lilyAiListener);
      }
    } catch (Exception e) {
      // network errors and malformed responses are reported with code 0 so
      // the caller treats them as retryable (see LilyAi.workerListener)
      listener.onError(data, isFromRetry, 0, e.getMessage(), lilyAiListener);
    }
  }
}
|
0
|
java-sources/ai/lilystyle/android-sdk/1.0.8/ai/lilystyle
|
java-sources/ai/lilystyle/android-sdk/1.0.8/ai/lilystyle/analytics_android/WorkerListener.java
|
package ai.lilystyle.analytics_android;
/**
 * Internal callback reporting the outcome of one Worker delivery attempt.
 * Invoked on the worker thread; implementations forward the optional
 * LilyAiListener to the main thread themselves.
 */
interface WorkerListener {
  /** Delivery succeeded. */
  void onSuccess(TrackingData data, boolean isFromRetry, LilyAiListener lilyAiListener);

  /** Delivery failed; code is the HTTP status, or 0 for network/parse errors. */
  void onError(TrackingData data, boolean isFromRetry, int code, String message, LilyAiListener lilyAiListener);
}
|
0
|
java-sources/ai/lilystyle/android-sdk/1.0.8/ai/lilystyle
|
java-sources/ai/lilystyle/android-sdk/1.0.8/ai/lilystyle/analytics_android/WorkerThreadFactory.java
|
package ai.lilystyle.analytics_android;
import org.jetbrains.annotations.NotNull;
import java.util.Locale;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * ThreadFactory that names SDK worker threads "LilyAI-Worker-N".
 */
class WorkerThreadFactory implements ThreadFactory {

  // AtomicInteger instead of a plain int: the executor may call newThread()
  // from several submitting threads concurrently, and the original unguarded
  // count++ could skip or duplicate numbers.
  private final AtomicInteger count = new AtomicInteger(1);

  @Override
  public Thread newThread(@NotNull Runnable worker) {
    Thread thread = new Thread(worker);
    thread.setName(String.format(Locale.getDefault(), Constants.THREAD_NAME, count.getAndIncrement()));
    return thread;
  }
}
|
0
|
java-sources/ai/lilystyle/android-sdk/1.0.8/ai/lilystyle/analytics_android
|
java-sources/ai/lilystyle/android-sdk/1.0.8/ai/lilystyle/analytics_android/data/DataFactory.java
|
package ai.lilystyle.analytics_android.data;
import android.util.Log;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
/**
 * Factory for building analytics message payloads as JSONObjects.
 */
public class DataFactory {

  /** Non-instantiable factory holder. */
  private DataFactory() {}

  /** Creates a fresh, empty message builder. */
  public static BaseMessageBuilder createBaseMessage() {
    return new BaseMessageBuilder();
  }

  /**
   * Fluent builder over a JSONObject. Setter failures are logged and
   * swallowed so a single bad value never aborts message construction.
   */
  public static class BaseMessageBuilder {

    private static final String LOG_TAG = "Lily_AI.datafactory";

    private JSONObject data = new JSONObject();

    BaseMessageBuilder() {}

    /** Returns the assembled payload (the builder's live backing object). */
    public JSONObject build() {
      return data;
    }

    // Single put-with-logging used by every typed setter below; org.json's
    // put(String, Object) applies the same validity checks (e.g. finite
    // doubles) as the primitive put overloads, so boxing here is equivalent.
    private BaseMessageBuilder putLogged(String key, Object value) {
      try {
        data.put(key, value);
      } catch (JSONException e) {
        Log.e(LOG_TAG, e.getMessage(), e);
      }
      return this;
    }

    /** Sets a String field. */
    public BaseMessageBuilder set(String key, String value) {
      return putLogged(key, value);
    }

    /** Sets a double field (must be finite). */
    public BaseMessageBuilder set(String key, double value) {
      return putLogged(key, value);
    }

    /** Sets a long field. */
    public BaseMessageBuilder set(String key, long value) {
      return putLogged(key, value);
    }

    /** Sets an int field. */
    public BaseMessageBuilder set(String key, int value) {
      return putLogged(key, value);
    }

    /** Sets a boolean field. */
    public BaseMessageBuilder set(String key, boolean value) {
      return putLogged(key, value);
    }

    /** Sets a nested-object field. */
    public BaseMessageBuilder set(String key, JSONObject value) {
      return putLogged(key, value);
    }

    /** Sets an array field. */
    public BaseMessageBuilder set(String key, JSONArray value) {
      return putLogged(key, value);
    }

    /** Sets an arbitrary-object field. */
    public BaseMessageBuilder set(String key, Object value) {
      return putLogged(key, value);
    }

    /** Appends a value to the array at {@code key}, creating it if absent. */
    public BaseMessageBuilder addToArray(String key, Object value) {
      try {
        JSONArray arr;
        if (data.has(key)) {
          arr = data.getJSONArray(key);
        } else {
          arr = new JSONArray();
          data.put(key, arr);
        }
        arr.put(value);
      } catch (JSONException e) {
        Log.e(LOG_TAG, e.getMessage(), e);
      }
      return this;
    }

    /** Sets the hit-timestamp field ("hts"). */
    public BaseMessageBuilder setHitTimestamp(long hts) {
      return set("hts", hts);
    }
  }
}
|
0
|
java-sources/ai/logfire/logback-logfire/0.0.2/com/logfire
|
java-sources/ai/logfire/logback-logfire/0.0.2/com/logfire/logback/LogfireAppender.java
|
package com.logfire.logback;
import ch.qos.logback.classic.encoder.PatternLayoutEncoder;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.classic.spi.IThrowableProxy;
import ch.qos.logback.core.UnsynchronizedAppenderBase;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.Module;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategies;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
/**
 * Logback appender that batches log events and ships them to the Logfire
 * ingest API over HTTP. Events are queued in memory (bounded by
 * {@code maxQueueSize}) and flushed either when {@code batchSize} is reached
 * or on a fixed {@code batchInterval} schedule. Failed batches are retried up
 * to {@code maxRetries} times before being dropped.
 */
public class LogfireAppender extends UnsynchronizedAppenderBase<ILoggingEvent> {
    // Customizable variables
    protected String appName;
    protected String ingestUrl = "https://in.logfire.ai";
    protected String sourceToken;
    protected String userAgent = "Logfire Logback Appender";
    protected List<String> mdcFields = new ArrayList<>();
    protected List<String> mdcTypes = new ArrayList<>();
    protected int maxQueueSize = 100000;
    protected int batchSize = 1000;
    protected int batchInterval = 3000;
    protected int connectTimeout = 5000;
    protected int readTimeout = 10000;
    protected int maxRetries = 5;
    protected int retrySleepMilliseconds = 300;
    protected PatternLayoutEncoder encoder;
    // Non-customizable variables
    protected Vector<ILoggingEvent> batch = new Vector<>();
    protected AtomicBoolean isFlushing = new AtomicBoolean(false);
    protected boolean mustReflush = false;
    protected boolean warnAboutMaxQueueSize = true;
    // Utils
    protected ScheduledExecutorService scheduledExecutorService;
    protected ScheduledFuture<?> scheduledFuture;
    protected ObjectMapper dataMapper;
    protected Logger logger;
    protected int retrySize = 0;
    protected int retries = 0;
    protected boolean disabled = false;
    // Daemon thread factory so the scheduled flusher never blocks JVM shutdown.
    protected ThreadFactory threadFactory = r -> {
        Thread thread = Executors.defaultThreadFactory().newThread(r);
        thread.setName("logfire-appender");
        thread.setDaemon(true);
        return thread;
    };

    public LogfireAppender() {
        logger = LoggerFactory.getLogger(LogfireAppender.class);
        dataMapper = new ObjectMapper()
                .setSerializationInclusion(JsonInclude.Include.NON_NULL)
                .setPropertyNamingStrategy(PropertyNamingStrategies.UPPER_CAMEL_CASE);
        scheduledExecutorService = Executors.newSingleThreadScheduledExecutor(threadFactory);
        scheduledFuture = scheduledExecutorService.scheduleWithFixedDelay(new LogfireSender(), batchInterval, batchInterval, TimeUnit.MILLISECONDS);
    }

    /**
     * Queues an event for shipment. Triggers an asynchronous flush when the
     * queue reaches {@code batchSize}; drops events beyond {@code maxQueueSize}.
     */
    @Override
    protected void append(ILoggingEvent event) {
        if (disabled)
            return;
        // Never ship our own diagnostics; that would loop forever.
        if (event.getLoggerName().equals(LogfireAppender.class.getName()))
            return;
        if (this.ingestUrl.isEmpty() || this.sourceToken == null || this.sourceToken.isEmpty()) {
            // Prevent potential deadlock, when a blocking logger is configured - avoid using logger directly in append
            startThread("logfire-warning-logger", () -> {
                logger.warn("Missing Source token for Logfire - disabling LogfireAppender. Find out how to fix this at: https://logfire.ai/docs/logs/java ");
            });
            this.disabled = true;
            return;
        }
        if (batch.size() < maxQueueSize) {
            batch.add(event);
        }
        if (warnAboutMaxQueueSize && batch.size() == maxQueueSize) {
            this.warnAboutMaxQueueSize = false;
            // Prevent potential deadlock, when a blocking logger is configured - avoid using logger directly in append
            startThread("logfire-error-logger", () -> {
                logger.error("Maximum number of messages in queue reached ({}). New messages will be dropped.", maxQueueSize);
            });
        }
        if (batch.size() >= batchSize) {
            if (isFlushing.get())
                return;
            startThread("logfire-appender-flush", new LogfireSender());
        }
    }

    /** Runs the given task on a fresh, named thread. */
    protected void startThread(String threadName, Runnable runnable) {
        Thread thread = Executors.defaultThreadFactory().newThread(runnable);
        thread.setName(threadName);
        thread.start();
    }

    /**
     * Flushes queued events in batches until the queue drains below
     * {@code batchSize} or a send permanently fails. Guarded by
     * {@code isFlushing} so only one flusher runs at a time.
     */
    protected void flush() {
        // Iterative re-flush: the previous implementation re-flushed by calling
        // itself recursively, which could grow the stack under sustained load.
        while (true) {
            if (batch.isEmpty())
                return;
            // Guaranteed to not be running concurrently
            if (isFlushing.getAndSet(true))
                return;
            mustReflush = false;
            int flushedSize = batch.size();
            if (flushedSize > batchSize) {
                flushedSize = batchSize;
                mustReflush = true;
            }
            // When retrying, resend exactly the slice that failed before.
            if (retries > 0 && flushedSize > retrySize) {
                flushedSize = retrySize;
                mustReflush = true;
            }
            if (!flushLogs(flushedSize)) {
                mustReflush = true;
            }
            isFlushing.set(false);
            if (!mustReflush && batch.size() < batchSize) {
                return;
            }
        }
    }

    /**
     * Sends the first {@code flushedSize} queued events to Logfire.
     *
     * @return true when the slice was sent (or dropped after exhausting
     *         retries) and removed from the queue, false when it should be
     *         retried
     */
    protected boolean flushLogs(int flushedSize) {
        retrySize = flushedSize;
        try {
            if (retries > maxRetries) {
                batch.subList(0, flushedSize).clear();
                logger.error("Dropped batch of {} logs.", flushedSize);
                warnAboutMaxQueueSize = true;
                retries = 0;
                return true;
            }
            if (retries > 0) {
                logger.info("Retrying to send {} logs to Logfire ({} / {})", flushedSize, retries, maxRetries);
                try {
                    TimeUnit.MILLISECONDS.sleep(retrySleepMilliseconds);
                } catch (InterruptedException e) {
                    // Continue, but keep the interrupt flag set for callers.
                    Thread.currentThread().interrupt();
                }
            }
            LogfireResponse response = callHttpURLConnection(flushedSize);
            if (response.getStatus() >= 300 || response.getStatus() < 200) {
                logger.error("Error calling Logfire : {} ({})", response.getError(), response.getStatus());
                retries++;
                return false;
            }
            batch.subList(0, flushedSize).clear();
            warnAboutMaxQueueSize = true;
            retries = 0;
            return true;
        } catch (ConcurrentModificationException e) {
            logger.error("Error clearing {} logs from batch, will retry immediately.", flushedSize, e);
            retries = maxRetries; // No point in retrying to send the data
        } catch (JsonProcessingException e) {
            logger.error("Error processing JSON data : {}", e.getMessage(), e);
            retries = maxRetries; // No point in retrying when batch cannot be processed into JSON
        } catch (Exception e) {
            logger.error("Error trying to call Logfire : {}", e.getMessage(), e);
        }
        retries++;
        return false;
    }

    /**
     * POSTs the first {@code flushedSize} events as JSON and returns the HTTP
     * status and message.
     */
    protected LogfireResponse callHttpURLConnection(int flushedSize) throws IOException {
        HttpURLConnection connection = getHttpURLConnection();
        try {
            // getOutputStream() implicitly opens the connection; the previous
            // explicit connect() call swallowed its own failure and only
            // produced a confusing second error here.
            try (OutputStream os = connection.getOutputStream()) {
                byte[] input = batchToJson(flushedSize).getBytes(StandardCharsets.UTF_8);
                os.write(input, 0, input.length);
                os.flush();
            }
            // Read the response BEFORE disconnecting; disconnect() may release
            // the underlying socket.
            return new LogfireResponse(connection.getResponseMessage(), connection.getResponseCode());
        } finally {
            connection.disconnect();
        }
    }

    /** Builds an authorized POST connection to the ingest endpoint. */
    protected HttpURLConnection getHttpURLConnection() throws IOException {
        HttpURLConnection httpURLConnection = (HttpURLConnection) new URL(this.ingestUrl).openConnection();
        httpURLConnection.setDoOutput(true);
        httpURLConnection.setDoInput(true);
        httpURLConnection.setRequestProperty("User-Agent", this.userAgent);
        httpURLConnection.setRequestProperty("Accept", "application/json");
        httpURLConnection.setRequestProperty("Content-Type", "application/json");
        httpURLConnection.setRequestProperty("Charset", "UTF-8");
        httpURLConnection.setRequestProperty("Authorization", String.format("Bearer %s", this.sourceToken));
        httpURLConnection.setRequestMethod("POST");
        httpURLConnection.setConnectTimeout(this.connectTimeout);
        httpURLConnection.setReadTimeout(this.readTimeout);
        return httpURLConnection;
    }

    /** Serializes the first {@code flushedSize} queued events to a JSON array. */
    protected String batchToJson(int flushedSize) throws JsonProcessingException {
        // Copy the slice first so serialization works on a stable snapshot.
        return this.dataMapper.writeValueAsString(
                new ArrayList<>(batch.subList(0, flushedSize))
                        .stream()
                        .map(this::buildPostData)
                        .collect(Collectors.toList())
        );
    }

    /** Maps a logback event to the flat structure expected by the ingest API. */
    protected Map<String, Object> buildPostData(ILoggingEvent event) {
        Map<String, Object> logLine = new HashMap<>();
        logLine.put("dt", Long.toString(event.getTimeStamp()));
        logLine.put("level", event.getLevel().toString());
        logLine.put("app", this.appName);
        logLine.put("message", generateLogMessage(event));
        logLine.put("meta", generateLogMeta(event));
        logLine.put("runtime", generateLogRuntime(event));
        logLine.put("args", event.getArgumentArray());
        if (event.getThrowableProxy() != null) {
            logLine.put("throwable", generateLogThrowable(event.getThrowableProxy()));
        }
        return logLine;
    }

    /** Formats the event via the configured encoder, or the raw formatted message. */
    protected String generateLogMessage(ILoggingEvent event) {
        return this.encoder != null ? new String(this.encoder.encode(event)) : event.getFormattedMessage();
    }

    /** Builds the "meta" map: logger name plus any configured MDC fields. */
    protected Map<String, Object> generateLogMeta(ILoggingEvent event) {
        Map<String, Object> logMeta = new HashMap<>();
        logMeta.put("logger", event.getLoggerName());
        if (!mdcFields.isEmpty() && !event.getMDCPropertyMap().isEmpty()) {
            for (Entry<String, String> entry : event.getMDCPropertyMap().entrySet()) {
                if (mdcFields.contains(entry.getKey())) {
                    // mdcTypes is positionally aligned with mdcFields.
                    String type = mdcTypes.get(mdcFields.indexOf(entry.getKey()));
                    logMeta.put(entry.getKey(), getMetaValue(type, entry.getValue()));
                }
            }
        }
        return logMeta;
    }

    /** Builds the "runtime" map: thread name and, when available, caller location. */
    protected Map<String, Object> generateLogRuntime(ILoggingEvent event) {
        Map<String, Object> logRuntime = new HashMap<>();
        logRuntime.put("thread", event.getThreadName());
        if (event.hasCallerData()) {
            StackTraceElement[] callerData = event.getCallerData();
            if (callerData.length > 0) {
                StackTraceElement callerContext = callerData[0];
                logRuntime.put("class", callerContext.getClassName());
                logRuntime.put("method", callerContext.getMethodName());
                logRuntime.put("file", callerContext.getFileName());
                logRuntime.put("line", callerContext.getLineNumber());
            }
        }
        return logRuntime;
    }

    /** Recursively serializes a throwable proxy including its cause chain. */
    protected Map<String, Object> generateLogThrowable(IThrowableProxy throwable) {
        Map<String, Object> logThrowable = new HashMap<>();
        logThrowable.put("message", throwable.getMessage());
        logThrowable.put("class", throwable.getClassName());
        logThrowable.put("stackTrace", throwable.getStackTraceElementProxyArray());
        if (throwable.getCause() != null) {
            logThrowable.put("cause", generateLogThrowable(throwable.getCause()));
        }
        return logThrowable;
    }

    /**
     * Converts an MDC string value to the declared type; falls back to the raw
     * string on parse failure or unknown type.
     */
    protected Object getMetaValue(String type, String value) {
        try {
            switch (type) {
                case "int":
                    return Integer.valueOf(value);
                case "long":
                    return Long.valueOf(value);
                case "boolean":
                    return Boolean.valueOf(value);
            }
        } catch (NumberFormatException e) {
            logger.error("Error getting meta value - {}", e.getMessage(), e);
        }
        return value;
    }

    /** Runnable wrapper around {@link #flush()} used by the scheduler and ad-hoc flush threads. */
    public class LogfireSender implements Runnable {
        @Override
        public void run() {
            try {
                flush();
            } catch (Exception e) {
                logger.error("Error trying to flush : {}", e.getMessage(), e);
                // Release the flush guard so later flushes are not blocked forever.
                if (isFlushing.get()) {
                    isFlushing.set(false);
                }
            }
        }
    }

    /**
     * Sets the application name for Logfire indexation.
     *
     * @param appName
     *            application name
     */
    public void setAppName(String appName) {
        this.appName = appName;
    }

    /**
     * Sets the Logfire ingest API url.
     *
     * @param ingestUrl
     *            Logfire ingest url
     */
    public void setIngestUrl(String ingestUrl) {
        this.ingestUrl = ingestUrl;
    }

    /**
     * Sets your Logfire source token.
     *
     * @param sourceToken
     *            your Logfire source token
     */
    public void setSourceToken(String sourceToken) {
        this.sourceToken = sourceToken;
    }

    /**
     * Deprecated! Kept for backward compatibility.
     * Sets your Logfire source token if unset.
     *
     * @param ingestKey
     *            your Logfire source token
     */
    public void setIngestKey(String ingestKey) {
        // Bug fix: the previous check was inverted - it returned when the
        // token was null and overwrote an explicitly configured token
        // otherwise, the opposite of the documented "set if unset" contract.
        if (this.sourceToken == null) {
            this.sourceToken = ingestKey;
        }
    }

    public void setUserAgent(String userAgent) {
        this.userAgent = userAgent;
    }

    /**
     * Sets the MDC fields that will be sent as metadata, separated by a comma.
     *
     * @param mdcFields
     *            MDC fields to include in structured logs
     */
    public void setMdcFields(String mdcFields) {
        this.mdcFields = Arrays.asList(mdcFields.split(","));
    }

    /**
     * Sets the MDC fields types that will be sent as metadata, in the same order as <i>mdcFields</i> are set
     * up, separated by a comma. Possible values are <i>string</i>, <i>boolean</i>, <i>int</i> and <i>long</i>.
     *
     * @param mdcTypes
     *            MDC fields types
     */
    public void setMdcTypes(String mdcTypes) {
        this.mdcTypes = Arrays.asList(mdcTypes.split(","));
    }

    /**
     * Sets the maximum number of messages in the queue. Messages over the limit will be dropped.
     *
     * @param maxQueueSize
     *            max size of the message queue
     */
    public void setMaxQueueSize(int maxQueueSize) {
        this.maxQueueSize = maxQueueSize;
    }

    /**
     * Sets the batch size for the number of messages to be sent via the API
     *
     * @param batchSize
     *            size of the message batch
     */
    public void setBatchSize(int batchSize) {
        this.batchSize = batchSize;
    }

    /**
     * Get the batch size for the number of messages to be sent via the API
     *
     * @return size of the message batch
     */
    public int getBatchSize() {
        return batchSize;
    }

    /**
     * Sets the maximum wait time for a batch to be sent via the API, in milliseconds.
     * Reschedules the background flusher with the new delay.
     *
     * @param batchInterval
     *            maximum wait time for message batch [ms]
     */
    public void setBatchInterval(int batchInterval) {
        scheduledFuture.cancel(false);
        scheduledFuture = scheduledExecutorService.scheduleWithFixedDelay(new LogfireSender(), batchInterval, batchInterval, TimeUnit.MILLISECONDS);
        this.batchInterval = batchInterval;
    }

    /**
     * Sets the connection timeout of the underlying HTTP client, in milliseconds.
     *
     * @param connectTimeout
     *            client connection timeout [ms]
     */
    public void setConnectTimeout(int connectTimeout) {
        this.connectTimeout = connectTimeout;
    }

    /**
     * Sets the read timeout of the underlying HTTP client, in milliseconds.
     *
     * @param readTimeout
     *            client read timeout [ms]
     */
    public void setReadTimeout(int readTimeout) {
        this.readTimeout = readTimeout;
    }

    /**
     * Sets the maximum number of retries for sending logs to Logfire. After that, current batch of logs will be dropped.
     *
     * @param maxRetries
     *            max number of retries for sending logs
     */
    public void setMaxRetries(int maxRetries) {
        this.maxRetries = maxRetries;
    }

    /**
     * Sets the number of milliseconds to sleep before retrying to send logs to Logfire.
     *
     * @param retrySleepMilliseconds
     *            number of milliseconds to sleep before retry
     */
    public void setRetrySleepMilliseconds(int retrySleepMilliseconds) {
        this.retrySleepMilliseconds = retrySleepMilliseconds;
    }

    /**
     * Registers a dynamically loaded Module object to ObjectMapper used for serialization of logged data.
     *
     * @param className
     *            fully qualified class name of the module, eg. "com.fasterxml.jackson.datatype.jsr310.JavaTimeModule"
     */
    public void setObjectMapperModule(String className) {
        try {
            // getDeclaredConstructor().newInstance() replaces the deprecated
            // Class.newInstance(), which rethrows checked constructor
            // exceptions without wrapping them.
            Module module = (Module) Class.forName(className).getDeclaredConstructor().newInstance();
            dataMapper.registerModule(module);
            logger.info("Module '{}' successfully registered in ObjectMapper.", className);
        } catch (ReflectiveOperationException e) {
            logger.error("Module '{}' couldn't be registered in ObjectMapper : ", className, e);
        }
    }

    public void setEncoder(PatternLayoutEncoder encoder) {
        this.encoder = encoder;
    }

    /** @return true when the appender disabled itself (missing source token) */
    public boolean isDisabled() {
        return this.disabled;
    }

    /** Stops the scheduler and performs a final synchronous flush of the queue. */
    @Override
    public void stop() {
        scheduledExecutorService.shutdown();
        mustReflush = true;
        flush();
        super.stop();
    }
}
|
0
|
java-sources/ai/logfire/logback-logfire/0.0.2/com/logfire
|
java-sources/ai/logfire/logback-logfire/0.0.2/com/logfire/logback/LogfireResponse.java
|
package com.logfire.logback;
/**
* Holder for Logfire error message.
*
* @author tomas@logfire.ai
*/
/**
 * Immutable holder for a Logfire HTTP response: status code plus error message.
 *
 * @author tomas@logfire.ai
 */
public class LogfireResponse {
    private final String error;
    private final int status;

    /**
     * @param error response message returned by the API
     * @param status HTTP status code returned by the API
     */
    public LogfireResponse(String error, int status) {
        this.error = error;
        this.status = status;
    }

    /** @return the response error message */
    public String getError() {
        return error;
    }

    /** @return the HTTP status code */
    public int getStatus() {
        return status;
    }
}
|
0
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las/sdk/APIException.java
|
package ai.lucidtech.las.sdk;
/**
 * Exception raised when the Lucidtech REST API responds with an error.
 */
public class APIException extends Exception {
    /** Creates an exception with no detail message. */
    public APIException() {
        super();
    }

    /**
     * Creates an exception with the given detail message.
     *
     * @param message description of the failure
     */
    public APIException(String message) {
        super(message);
    }

    /**
     * Creates an exception from an HTTP status code and API error message.
     *
     * @param code HTTP status code returned by the API
     * @param message error message returned by the API
     */
    public APIException(int code, String message) {
        super("Code: " + code + "\nMessage: " + message);
    }
}
|
0
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las/sdk/Client.java
|
package ai.lucidtech.las.sdk;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.*;
import org.apache.commons.io.IOUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.NameValuePair;
import org.apache.http.StatusLine;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPatch;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.util.EntityUtils;
import org.json.JSONException;
import org.json.JSONObject;
public class Client {
private HttpClient httpClient;
private Credentials credentials;
/**
 * A client to invoke api methods from Lucidtech AI Services.
 *
 * @param credentials Credentials to use
 * @see Credentials
 */
public Client(Credentials credentials) {
    this.credentials = credentials;
    this.httpClient = HttpClientBuilder.create().build();
}
/**
 * Create an app client, calls the POST /appClients endpoint.
 *
 * @see CreateAppClientOptions
 * @param options Additional options to include in request body
 * @return AppClient response from API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject createAppClient(
    CreateAppClientOptions options
) throws IOException, APIException, MissingAccessTokenException {
    JSONObject payload = new JSONObject();
    this.addOptions(payload, options);
    return new JSONObject(this.executeRequest(this.createAuthorizedRequest("POST", "/appClients", payload)));
}
/**
 * Create an app client with default options, calls the POST /appClients endpoint.
 *
 * @return AppClient response from API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject createAppClient() throws IOException, APIException, MissingAccessTokenException {
    CreateAppClientOptions noOptions = null;
    return this.createAppClient(noOptions);
}
/**
 * Update an appClient, calls the PATCH /appClients/{appClientId} endpoint.
 *
 * @see UpdateAppClientOptions
 * @param appClientId Id of the appClient
 * @param options Additional options to include in request body
 * @return AppClient response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject updateAppClient(
    String appClientId,
    UpdateAppClientOptions options
) throws IOException, APIException, MissingAccessTokenException {
    JSONObject payload = new JSONObject();
    this.addOptions(payload, options);
    HttpUriRequest patchRequest = this.createAuthorizedRequest("PATCH", "/appClients/" + appClientId, payload);
    return new JSONObject(this.executeRequest(patchRequest));
}
/**
 * List appClients available, calls the GET /appClients endpoint.
 *
 * @see ListAppClientsOptions
 * @param options Additional options to pass along as query parameters
 * @return AppClients response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject listAppClients(
    ListAppClientsOptions options
) throws IOException, APIException, MissingAccessTokenException {
    HttpUriRequest listRequest = this.createAuthorizedRequest("GET", "/appClients", getQueryParameters(options));
    return new JSONObject(this.executeRequest(listRequest));
}
/**
 * List appClients available without filtering, calls the GET /appClients endpoint.
 *
 * @return AppClients response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject listAppClients() throws IOException, APIException, MissingAccessTokenException {
    ListAppClientsOptions noOptions = null;
    return this.listAppClients(noOptions);
}
/**
 * Delete an appClient, calls the DELETE /appClients/{appClientId} endpoint.
 *
 * @param appClientId Id of the appClient
 * @return AppClient response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject deleteAppClient(
    String appClientId
) throws IOException, APIException, MissingAccessTokenException {
    HttpUriRequest deleteRequest = this.createAuthorizedRequest("DELETE", "/appClients/" + appClientId);
    return new JSONObject(this.executeRequest(deleteRequest));
}
/**
 * Create an asset, calls the POST /assets endpoint.
 *
 * @see CreateAssetOptions
 * @param content Binary data
 * @param options Additional options to include in request body
 * @return Asset response from API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject createAsset(
    byte[] content,
    CreateAssetOptions options
) throws IOException, APIException, MissingAccessTokenException {
    JSONObject payload = new JSONObject();
    // Asset content is transported base64-encoded inside the JSON body.
    payload.put("content", Base64.getEncoder().encodeToString(content));
    this.addOptions(payload, options);
    HttpUriRequest postRequest = this.createAuthorizedRequest("POST", "/assets", payload);
    return new JSONObject(this.executeRequest(postRequest));
}
/**
 * Create an asset, calls the POST /assets endpoint.
 *
 * @see CreateAssetOptions
 * @param content Data from input stream
 * @param options Additional options to include in request body
 * @return Asset response from API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject createAsset(
    InputStream content,
    CreateAssetOptions options
) throws IOException, APIException, MissingAccessTokenException {
    byte[] rawContent = IOUtils.toByteArray(content);
    return this.createAsset(rawContent, options);
}
/**
 * Create an asset with default options, calls the POST /assets endpoint.
 *
 * @param content Binary data
 * @return Asset response from API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject createAsset(byte[] content) throws IOException, APIException, MissingAccessTokenException {
    CreateAssetOptions noOptions = null;
    return this.createAsset(content, noOptions);
}
/**
 * Create an asset with default options, calls the POST /assets endpoint.
 *
 * @param content Data from input stream
 * @return Asset response from API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject createAsset(InputStream content) throws IOException, APIException, MissingAccessTokenException {
    byte[] rawContent = IOUtils.toByteArray(content);
    return this.createAsset(rawContent, null);
}
/**
 * List assets available, calls the GET /assets endpoint.
 *
 * @see ListAssetsOptions
 * @param options Additional options to pass along as query parameters
 * @return Assets response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject listAssets(ListAssetsOptions options)
        throws IOException, APIException, MissingAccessTokenException {
    HttpUriRequest listRequest = this.createAuthorizedRequest("GET", "/assets", getQueryParameters(options));
    return new JSONObject(this.executeRequest(listRequest));
}
/**
 * List assets available without filtering, calls the GET /assets endpoint.
 *
 * @return Assets response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject listAssets() throws IOException, APIException, MissingAccessTokenException {
    ListAssetsOptions noOptions = null;
    return this.listAssets(noOptions);
}
/**
 * Get asset, calls the GET /assets/{assetId} endpoint.
 *
 * @param assetId Id of the asset
 * @return Asset response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject getAsset(String assetId) throws IOException, APIException, MissingAccessTokenException {
    HttpUriRequest getRequest = this.createAuthorizedRequest("GET", "/assets/" + assetId);
    return new JSONObject(this.executeRequest(getRequest));
}
/**
 * Update an asset, calls the PATCH /assets/{assetId} endpoint.
 *
 * @see UpdateAssetOptions
 * @param assetId Id of the asset
 * @param options Additional options to include in request body
 * @return Asset response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject updateAsset(
    String assetId,
    UpdateAssetOptions options
) throws IOException, APIException, MissingAccessTokenException {
    JSONObject payload = new JSONObject();
    this.addOptions(payload, options);
    HttpUriRequest patchRequest = this.createAuthorizedRequest("PATCH", "/assets/" + assetId, payload);
    return new JSONObject(this.executeRequest(patchRequest));
}
/**
 * Delete an asset, calls the DELETE /assets/{assetId} endpoint.
 *
 * @param assetId Id of the asset
 * @return Asset response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject deleteAsset(
    String assetId
) throws IOException, APIException, MissingAccessTokenException {
    HttpUriRequest deleteRequest = this.createAuthorizedRequest("DELETE", "/assets/" + assetId);
    return new JSONObject(this.executeRequest(deleteRequest));
}
/**
 * Create a document, calls the POST /documents endpoint.
 *
 * @see CreateDocumentOptions
 * @param content Binary data
 * @param contentType A mime type for the document
 * @param options Additional options to include in request body
 * @return Document response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject createDocument(
    byte[] content,
    ContentType contentType,
    CreateDocumentOptions options
) throws IOException, APIException, MissingAccessTokenException {
    JSONObject payload = new JSONObject();
    // Document content is transported base64-encoded inside the JSON body.
    payload.put("content", Base64.getEncoder().encodeToString(content));
    payload.put("contentType", contentType.getMimeType());
    this.addOptions(payload, options);
    HttpUriRequest postRequest = this.createAuthorizedRequest("POST", "/documents", payload);
    return new JSONObject(this.executeRequest(postRequest));
}
/**
 * Create a document, calls the POST /documents endpoint.
 *
 * @see CreateDocumentOptions
 * @param content Data from input stream
 * @param contentType A mime type for the document
 * @param options Additional options to include in request body
 * @return Document response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject createDocument(
    InputStream content,
    ContentType contentType,
    CreateDocumentOptions options
) throws IOException, APIException, MissingAccessTokenException {
    return this.createDocument(IOUtils.toByteArray(content), contentType, options);
}
/**
 * Create a document with default options, calls the POST /documents endpoint.
 *
 * @see CreateDocumentOptions
 * @param content Data from input stream
 * @param contentType A mime type for the document
 * @return Document response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject createDocument(
    InputStream content,
    ContentType contentType
) throws IOException, APIException, MissingAccessTokenException {
    return this.createDocument(IOUtils.toByteArray(content), contentType, null);
}
/**
 * Create a document with default options, calls the POST /documents endpoint.
 *
 * @see CreateDocumentOptions
 * @param content Binary data
 * @param contentType A mime type for the document
 * @return Document response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject createDocument(
    byte[] content,
    ContentType contentType
) throws IOException, APIException, MissingAccessTokenException {
    CreateDocumentOptions noOptions = null;
    return this.createDocument(content, contentType, noOptions);
}
/**
 * List documents, calls the GET /documents endpoint.
 *
 * @see ListDocumentsOptions
 * @param options Additional options to pass along as query parameters
 * @return Documents response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject listDocuments(ListDocumentsOptions options)
        throws IOException, APIException, MissingAccessTokenException {
    HttpUriRequest listRequest = this.createAuthorizedRequest("GET", "/documents", getQueryParameters(options));
    return new JSONObject(this.executeRequest(listRequest));
}
/**
 * List documents without filtering, calls the GET /documents endpoint.
 *
 * @return Documents response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject listDocuments() throws IOException, APIException, MissingAccessTokenException {
    ListDocumentsOptions noOptions = null;
    return this.listDocuments(noOptions);
}
/**
 * Delete documents, calls the DELETE /documents endpoint.
 *
 * @see DeleteDocumentsOptions
 * @param options Additional options to pass along as query parameters
 * @return Documents response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject deleteDocuments(
    DeleteDocumentsOptions options
) throws IOException, APIException, MissingAccessTokenException {
    HttpUriRequest deleteRequest = this.createAuthorizedRequest("DELETE", "/documents", getQueryParameters(options));
    return new JSONObject(this.executeRequest(deleteRequest));
}
/**
 * Delete documents without filtering, calls the DELETE /documents endpoint.
 *
 * @see Client#createDocument
 * @return Documents response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject deleteDocuments() throws IOException, APIException, MissingAccessTokenException {
    DeleteDocumentsOptions noOptions = null;
    return this.deleteDocuments(noOptions);
}
/**
 * Get document, calls the GET /documents/{documentId} endpoint.
 *
 * @param documentId Id of the document
 * @return Document response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject getDocument(String documentId) throws IOException, APIException, MissingAccessTokenException {
    HttpUriRequest getRequest = this.createAuthorizedRequest("GET", "/documents/" + documentId);
    return new JSONObject(this.executeRequest(getRequest));
}
/**
 * Update document, calls the PATCH /documents/{documentId} endpoint.
 *
 * @see Client#createDocument
 * @param documentId The document id to post groundTruth to.
 * @param options Additional options to include in request body
 * @return Document response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject updateDocument(
    String documentId,
    UpdateDocumentOptions options
) throws IOException, APIException, MissingAccessTokenException {
    JSONObject payload = new JSONObject();
    this.addOptions(payload, options);
    HttpUriRequest patchRequest = this.createAuthorizedRequest("PATCH", "/documents/" + documentId, payload);
    return new JSONObject(this.executeRequest(patchRequest));
}
/**
 * Get log, calls the GET /logs/{logId} endpoint.
 *
 * @param logId Id of the log
 * @return Log response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject getLog(String logId) throws IOException, APIException, MissingAccessTokenException {
    String path = "/logs/" + logId;
    HttpUriRequest request = this.createAuthorizedRequest("GET", path);
    return new JSONObject(this.executeRequest(request));
}
/**
 * List logs, calls the GET /logs endpoint.
 *
 * @see ListLogsOptions
 * @param options Additional options to pass along as query parameters
 * @return Logs response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject listLogs(
    ListLogsOptions options
) throws IOException, APIException, MissingAccessTokenException {
    // Options (possibly null) are encoded as query parameters.
    HttpUriRequest request = this.createAuthorizedRequest("GET", "/logs", getQueryParameters(options));
    return new JSONObject(this.executeRequest(request));
}
/**
 * List logs, calls the GET /logs endpoint.
 *
 * @return Logs response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject listLogs() throws IOException, APIException, MissingAccessTokenException {
    // Convenience overload: list without any filtering options.
    return listLogs(null);
}
/**
 * Create a model, calls the POST /models endpoint.
 *
 * @see CreateModelOptions
 * @see FieldConfig
 * @param width The number of pixels to be used for the input image width of your model
 * @param height The number of pixels to be used for the input image height of your model
 * @param fieldConfig Specification of the fields that the model is going to predict
 * @param options Additional options to include in request body
 * @return Model response from API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject createModel(
    int width,
    int height,
    FieldConfig fieldConfig,
    CreateModelOptions options
) throws IOException, APIException, MissingAccessTokenException {
    // Mandatory fields first, then any optional ones supplied by the caller.
    JSONObject payload = new JSONObject()
        .put("width", width)
        .put("height", height)
        .put("fieldConfig", fieldConfig.toJson());
    this.addOptions(payload, options);
    HttpUriRequest request = this.createAuthorizedRequest("POST", "/models", payload);
    return new JSONObject(this.executeRequest(request));
}
/**
 * Create a model, calls the POST /models endpoint.
 *
 * @see FieldConfig
 * @param width The number of pixels to be used for the input image width of your model
 * @param height The number of pixels to be used for the input image height of your model
 * @param fieldConfig Specification of the fields that the model is going to predict
 * @return Model response from API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject createModel(
    int width,
    int height,
    FieldConfig fieldConfig
) throws IOException, APIException, MissingAccessTokenException {
    // Convenience overload: create with no additional options.
    return createModel(width, height, fieldConfig, null);
}
/**
 * Updates a model, calls the PATCH /models/{modelId} endpoint.
 *
 * @see UpdateModelOptions
 * @param modelId Id of the model
 * @param options Additional options to include in request body
 * @return Model response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject updateModel(
    String modelId,
    UpdateModelOptions options
) throws IOException, APIException, MissingAccessTokenException {
    // The request body consists solely of the optional fields.
    JSONObject payload = new JSONObject();
    this.addOptions(payload, options);
    HttpUriRequest request = this.createAuthorizedRequest("PATCH", "/models/" + modelId, payload);
    return new JSONObject(this.executeRequest(request));
}
/**
 * Get model, calls the GET /models/{modelId} endpoint.
 *
 * @param modelId Id of the model
 * @return Model response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject getModel(String modelId) throws IOException, APIException, MissingAccessTokenException {
    String path = "/models/" + modelId;
    HttpUriRequest request = this.createAuthorizedRequest("GET", path);
    return new JSONObject(this.executeRequest(request));
}
/**
 * List models, calls the GET /models endpoint.
 *
 * @see ListModelsOptions
 * @param options Additional options to pass along as query parameters
 * @return Models response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject listModels(
    ListModelsOptions options
) throws IOException, APIException, MissingAccessTokenException {
    // Options (possibly null) are encoded as query parameters.
    HttpUriRequest request = this.createAuthorizedRequest("GET", "/models", getQueryParameters(options));
    return new JSONObject(this.executeRequest(request));
}
/**
 * List models available, calls the GET /models endpoint.
 *
 * @return Models response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject listModels() throws IOException, APIException, MissingAccessTokenException {
    // Convenience overload: list without any filtering options.
    return this.listModels(null);
}
/**
 * Create a prediction on a document using specified model, calls the POST /predictions endpoint.
 *
 * @see Client#createDocument
 * @see CreatePredictionOptions
 * @param documentId The document id to run inference and create a prediction on.
 * @param modelId The id of the model to use for inference
 * @param options Additional options to include in request body
 * @return Prediction response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject createPrediction(
    String documentId,
    String modelId,
    CreatePredictionOptions options
) throws IOException, APIException, MissingAccessTokenException {
    // Mandatory fields first, then any optional ones supplied by the caller.
    JSONObject payload = new JSONObject()
        .put("documentId", documentId)
        .put("modelId", modelId);
    this.addOptions(payload, options);
    HttpUriRequest request = this.createAuthorizedRequest("POST", "/predictions", payload);
    return new JSONObject(this.executeRequest(request));
}
/**
 * Create a prediction on a document using specified model, calls the POST /predictions endpoint.
 *
 * @see Client#createDocument
 * @param documentId The document id to run inference and create a prediction on.
 * @param modelId The id of the model to use for inference
 * @return Prediction response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject createPrediction(
    String documentId,
    String modelId
) throws IOException, APIException, MissingAccessTokenException {
    // Convenience overload: predict with no additional options.
    return createPrediction(documentId, modelId, null);
}
/**
 * List predictions available, calls the GET /predictions endpoint.
 *
 * @see ListPredictionsOptions
 * @param options Additional options to pass along as query parameters
 * @return Predictions response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject listPredictions(
    ListPredictionsOptions options
) throws IOException, APIException, MissingAccessTokenException {
    // Options (possibly null) are encoded as query parameters.
    HttpUriRequest request = this.createAuthorizedRequest("GET", "/predictions", getQueryParameters(options));
    return new JSONObject(this.executeRequest(request));
}
/**
 * List predictions available, calls the GET /predictions endpoint.
 *
 * @return Predictions response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject listPredictions() throws IOException, APIException, MissingAccessTokenException {
    // Convenience overload: list without any filtering options.
    return listPredictions(null);
}
/**
 * Create secret, calls the POST /secrets endpoint.
 *
 * @see CreateSecretOptions
 * @param data Key-Value pairs to store secretly
 * @param options Additional options to include in request body
 * @return Secret response from API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject createSecret(
    JSONObject data,
    CreateSecretOptions options
) throws IOException, APIException, MissingAccessTokenException {
    // Build the body with a plain JSONObject instead of double-brace initialization:
    // the anonymous-subclass idiom captures a reference to this Client instance and
    // is inconsistent with every other method in this class.
    JSONObject body = new JSONObject();
    body.put("data", data);
    this.addOptions(body, options);
    HttpUriRequest request = this.createAuthorizedRequest("POST", "/secrets", body);
    String jsonResponse = this.executeRequest(request);
    return new JSONObject(jsonResponse);
}
/**
 * Create a secret, calls the POST /secrets endpoint.
 *
 * @see CreateSecretOptions
 * @param data Key-Value pairs to store secretly
 * @param options Additional options to include in request body
 * @return Secret response from API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject createSecret(
    Map<String, String> data,
    CreateSecretOptions options
) throws IOException, APIException, MissingAccessTokenException {
    // Wrap the map in a JSONObject and delegate to the JSON-based overload.
    JSONObject jsonData = new JSONObject(data);
    return createSecret(jsonData, options);
}
/**
 * Create a secret, calls the POST /secrets endpoint.
 *
 * @param data Key-Value pairs to store secretly
 * @return Secret response from API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject createSecret(
    Map<String, String> data
) throws IOException, APIException, MissingAccessTokenException {
    // Convenience overload: create with no additional options.
    return createSecret(data, null);
}
/**
 * Create a secret, calls the POST /secrets endpoint.
 *
 * @param data Key-Value pairs to store secretly
 * @return Secret response from API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject createSecret(
    JSONObject data
) throws IOException, APIException, MissingAccessTokenException {
    // Convenience overload: create with no additional options.
    return createSecret(data, (CreateSecretOptions) null);
}
/**
 * List secrets, calls the GET /secrets endpoint.
 *
 * @see ListSecretsOptions
 * @param options Additional options to pass along as query parameters
 * @return Secrets response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject listSecrets(
    ListSecretsOptions options
) throws IOException, APIException, MissingAccessTokenException {
    // Options (possibly null) are encoded as query parameters.
    HttpUriRequest request = this.createAuthorizedRequest("GET", "/secrets", getQueryParameters(options));
    return new JSONObject(this.executeRequest(request));
}
/**
 * List secrets, calls the GET /secrets endpoint.
 *
 * @return Secrets response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject listSecrets() throws IOException, APIException, MissingAccessTokenException {
    // Convenience overload: list without any filtering options.
    return listSecrets(null);
}
/**
 * Update a secret, calls the PATCH /secrets/{secretId} endpoint.
 *
 * @see UpdateSecretOptions
 * @param secretId Id of the secret
 * @param options Additional options to include in request body
 * @return Secret response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject updateSecret(
    String secretId,
    UpdateSecretOptions options
) throws IOException, APIException, MissingAccessTokenException {
    // The request body consists solely of the optional fields.
    JSONObject payload = new JSONObject();
    this.addOptions(payload, options);
    HttpUriRequest request = this.createAuthorizedRequest("PATCH", "/secrets/" + secretId, payload);
    return new JSONObject(this.executeRequest(request));
}
/**
 * Delete a secret, calls the DELETE /secrets/{secretId} endpoint.
 *
 * @param secretId Id of the secret
 * @return Secret response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject deleteSecret(
    String secretId
) throws IOException, APIException, MissingAccessTokenException {
    String path = "/secrets/" + secretId;
    HttpUriRequest request = this.createAuthorizedRequest("DELETE", path);
    return new JSONObject(this.executeRequest(request));
}
/**
 * Create a transition, calls the POST /transitions endpoint.
 *
 * @see CreateTransitionOptions
 * @see TransitionType
 * @param transitionType Type of transition
 * @param options Additional options to include in request body
 * @return Transition response from API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject createTransition(
    TransitionType transitionType,
    CreateTransitionOptions options
) throws IOException, APIException, MissingAccessTokenException {
    // Mandatory field first, then any optional ones supplied by the caller.
    JSONObject payload = new JSONObject().put("transitionType", transitionType.value);
    this.addOptions(payload, options);
    HttpUriRequest request = this.createAuthorizedRequest("POST", "/transitions", payload);
    return new JSONObject(this.executeRequest(request));
}
/**
 * Create a transition, calls the POST /transitions endpoint.
 *
 * @see TransitionType
 * @param transitionType Type of transition
 * @return Transition response from API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject createTransition(
    TransitionType transitionType
) throws IOException, APIException, MissingAccessTokenException {
    // Convenience overload: create with no additional options.
    return createTransition(transitionType, null);
}
/**
 * List transitions, calls the GET /transitions endpoint.
 *
 * @see ListTransitionsOptions
 * @param options Additional options to pass along as query parameters
 * @return Transitions response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject listTransitions(ListTransitionsOptions options)
    throws IOException, APIException, MissingAccessTokenException {
    // Options (possibly null) are encoded as query parameters.
    HttpUriRequest request = this.createAuthorizedRequest("GET", "/transitions", getQueryParameters(options));
    return new JSONObject(this.executeRequest(request));
}
/**
 * List transitions, calls the GET /transitions endpoint.
 *
 * @return Transitions response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject listTransitions() throws IOException, APIException, MissingAccessTokenException {
    // Convenience overload: list without any filtering options.
    return listTransitions(null);
}
/**
 * Get transition, calls the GET /transitions/{transitionId} endpoint.
 *
 * @param transitionId Id of the transition
 * @return Transition response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject getTransition(String transitionId) throws IOException, APIException, MissingAccessTokenException {
    String path = "/transitions/" + transitionId;
    HttpUriRequest request = this.createAuthorizedRequest("GET", path);
    return new JSONObject(this.executeRequest(request));
}
/**
 * Updates a transition, calls the PATCH /transitions/{transitionId} endpoint.
 *
 * @see UpdateTransitionOptions
 * @param transitionId Id of the transition
 * @param options Additional options to include in request body
 * @return Transition response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject updateTransition(
    String transitionId,
    UpdateTransitionOptions options
) throws IOException, APIException, MissingAccessTokenException {
    // The request body consists solely of the optional fields.
    JSONObject payload = new JSONObject();
    this.addOptions(payload, options);
    HttpUriRequest request = this.createAuthorizedRequest("PATCH", "/transitions/" + transitionId, payload);
    return new JSONObject(this.executeRequest(request));
}
/**
 * Start executing a manual transition, calls the POST /transitions/{transitionId}/executions endpoint.
 *
 * @param transitionId Id of the transition
 * @return TransitionExecution response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject executeTransition(
    String transitionId
) throws IOException, APIException, MissingAccessTokenException {
    // POST with an empty JSON body starts a new execution.
    String path = "/transitions/" + transitionId + "/executions";
    HttpUriRequest request = this.createAuthorizedRequest("POST", path, new JSONObject());
    return new JSONObject(this.executeRequest(request));
}
/**
 * Delete a transition, calls the DELETE /transitions/{transitionId} endpoint.
 * Will fail if transition is in use by one or more workflows.
 *
 * @param transitionId Id of the transition
 * @return Transition response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject deleteTransition(
    String transitionId
) throws IOException, APIException, MissingAccessTokenException {
    String path = "/transitions/" + transitionId;
    HttpUriRequest request = this.createAuthorizedRequest("DELETE", path);
    return new JSONObject(this.executeRequest(request));
}
/**
 * List executions in a transition, calls the GET /transitions/{transitionId}/executions endpoint.
 *
 * @see ListTransitionExecutionsOptions
 * @param transitionId Id of the transition
 * @param options Additional options to pass along as query parameters
 * @return Transition executions response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject listTransitionExecutions(String transitionId, ListTransitionExecutionsOptions options)
    throws IOException, APIException, MissingAccessTokenException {
    // Options (possibly null) are encoded as query parameters.
    String path = "/transitions/" + transitionId + "/executions";
    HttpUriRequest request = this.createAuthorizedRequest("GET", path, getQueryParameters(options));
    return new JSONObject(this.executeRequest(request));
}
/**
 * List executions in a transition, calls the GET /transitions/{transitionId}/executions endpoint.
 *
 * @param transitionId Id of the transition
 * @return Transition executions response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject listTransitionExecutions(String transitionId)
    throws IOException, APIException, MissingAccessTokenException {
    // Convenience overload: list without any filtering options.
    return listTransitionExecutions(transitionId, null);
}
/**
 * Get an execution of a transition, calls the GET /transitions/{transitionId}/executions/{executionId} endpoint
 *
 * @param transitionId Id of the transition
 * @param executionId Id of the execution
 * @return TransitionExecution response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject getTransitionExecution(String transitionId, String executionId)
    throws IOException, APIException, MissingAccessTokenException {
    String path = "/transitions/" + transitionId + "/executions/" + executionId;
    HttpUriRequest request = this.createAuthorizedRequest("GET", path);
    return new JSONObject(this.executeRequest(request));
}
/**
 * Ends the processing of the transition execution,
 * calls the PATCH /transitions/{transitionId}/executions/{executionId} endpoint.
 *
 * @see UpdateTransitionExecutionOptions
 * @see TransitionExecutionStatus
 * @param transitionId Id of the transition
 * @param executionId Id of the execution
 * @param status Status of the execution
 * @param options Additional options to include in request body; may be null
 * @return Transition response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject updateTransitionExecution(
    String transitionId,
    String executionId,
    TransitionExecutionStatus status,
    UpdateTransitionExecutionOptions options
) throws IOException, APIException, MissingAccessTokenException {
    JSONObject body = new JSONObject();
    body.put("status", status.value);
    // Use the null-safe helper used by every other method in this class; the previous
    // direct call options.addOptions(body) threw NullPointerException when options was null.
    this.addOptions(body, options);
    String path = "/transitions/" + transitionId + "/executions/" + executionId;
    HttpUriRequest request = this.createAuthorizedRequest("PATCH", path, body);
    String jsonResponse = this.executeRequest(request);
    return new JSONObject(jsonResponse);
}
/**
 * Send heartbeat for a manual execution to signal that we are still working on it.
 * Must be done at minimum once every 60 seconds or the transition execution will time out,
 * calls the POST /transitions/{transitionId}/executions/{executionId}/heartbeats endpoint.
 *
 * @param transitionId Id of the transition
 * @param executionId Id of the execution
 * @return Empty response
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject sendHeartbeat(
    String transitionId,
    String executionId
) throws IOException, APIException, MissingAccessTokenException {
    // POST with an empty JSON body; the heartbeat carries no payload.
    String path = "/transitions/" + transitionId + "/executions/" + executionId + "/heartbeats";
    HttpUriRequest request = this.createAuthorizedRequest("POST", path, new JSONObject());
    return new JSONObject(this.executeRequest(request));
}
/**
 * Create a user, calls the POST /users endpoint.
 *
 * @see CreateUserOptions
 * @param email Email of the new user
 * @param options Additional options to include in request body
 * @return User response from API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject createUser(
    String email,
    CreateUserOptions options
) throws IOException, APIException, MissingAccessTokenException {
    // Mandatory field first, then any optional ones supplied by the caller.
    JSONObject payload = new JSONObject().put("email", email);
    this.addOptions(payload, options);
    HttpUriRequest request = this.createAuthorizedRequest("POST", "/users", payload);
    return new JSONObject(this.executeRequest(request));
}
/**
 * Create a user, calls the POST /users endpoint.
 *
 * @param email Email to the new user
 * @return User response from API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject createUser(String email) throws IOException, APIException, MissingAccessTokenException {
    // Convenience overload: create with no additional options.
    return createUser(email, null);
}
/**
 * List users, calls the GET /users endpoint.
 *
 * @see ListUsersOptions
 * @param options Additional options to pass along as query parameters
 * @return Users response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject listUsers(
    ListUsersOptions options
) throws IOException, APIException, MissingAccessTokenException {
    // Options (possibly null) are encoded as query parameters.
    HttpUriRequest request = this.createAuthorizedRequest("GET", "/users", getQueryParameters(options));
    return new JSONObject(this.executeRequest(request));
}
/**
 * List users, calls the GET /users endpoint.
 *
 * @return Users response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject listUsers() throws IOException, APIException, MissingAccessTokenException {
    // Convenience overload: list without any filtering options.
    return listUsers(null);
}
/**
 * Get user, calls the GET /users/{userId} endpoint.
 *
 * @param userId Id of user
 * @return User response
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject getUser(String userId) throws IOException, APIException, MissingAccessTokenException {
    String path = "/users/" + userId;
    HttpUriRequest request = this.createAuthorizedRequest("GET", path);
    return new JSONObject(this.executeRequest(request));
}
/**
 * Updates a user, calls the PATCH /users/{userId} endpoint.
 *
 * @see UpdateUserOptions
 * @param userId Id of user
 * @param options Additional options to include in request body
 * @return User response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject updateUser(
    String userId,
    UpdateUserOptions options
) throws IOException, APIException, MissingAccessTokenException {
    // The request body consists solely of the optional fields.
    JSONObject payload = new JSONObject();
    this.addOptions(payload, options);
    HttpUriRequest request = this.createAuthorizedRequest("PATCH", "/users/" + userId, payload);
    return new JSONObject(this.executeRequest(request));
}
/**
 * Delete a user, calls the DELETE /users/{userId} endpoint.
 * (Javadoc previously said PATCH; the request below is a DELETE.)
 *
 * @param userId Id of user
 * @return User response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject deleteUser(String userId) throws IOException, APIException, MissingAccessTokenException {
    HttpUriRequest request = this.createAuthorizedRequest("DELETE", "/users/" + userId);
    String response = this.executeRequest(request);
    return new JSONObject(response);
}
/**
 * Creates a new workflow, calls the POST /workflows endpoint.
 * Check out Lucidtech's tutorials for more info on how to create a workflow.
 * see https://docs.lucidtech.ai/getting-started/tutorials/setup_predict_and_approve
 *
 * @see CreateWorkflowOptions
 * @param specification Specification of the workflow,
 * currently supporting ASL: https://states-language.net/spec.html. Check out the tutorials for more information:
 * see https://docs.lucidtech.ai/getting-started/tutorials/setup_predict_and_approve#creating-the-workflow
 * @param options Additional options to include in request body
 * @return Workflow response from API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject createWorkflow(
    JSONObject specification,
    CreateWorkflowOptions options
) throws IOException, APIException, MissingAccessTokenException {
    // Mandatory field first, then any optional ones supplied by the caller.
    JSONObject payload = new JSONObject().put("specification", specification);
    this.addOptions(payload, options);
    HttpUriRequest request = this.createAuthorizedRequest("POST", "/workflows", payload);
    return new JSONObject(this.executeRequest(request));
}
/**
 * Creates a new workflow, calls the POST /workflows endpoint.
 * Check out Lucidtech's tutorials for more info on how to create a workflow.
 * see https://docs.lucidtech.ai/getting-started/tutorials/setup_predict_and_approve
 *
 * @param specification Specification of the workflow,
 * currently supporting ASL: https://states-language.net/spec.html. Check out the tutorials for more information:
 * see https://docs.lucidtech.ai/getting-started/tutorials/setup_predict_and_approve#creating-the-workflow
 * @return Workflow response from API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject createWorkflow(
    JSONObject specification
) throws IOException, APIException, MissingAccessTokenException {
    // Convenience overload: create with no additional options.
    return createWorkflow(specification, null);
}
/**
 * List workflows, calls the GET /workflows endpoint.
 *
 * @see ListWorkflowsOptions
 * @param options Additional options to pass along as query parameters
 * @return Workflows response from REST API
 * @throws IOException General IOException
 * @throws APIException Raised when API returns an erroneous status code
 * @throws MissingAccessTokenException Raised if access token cannot be obtained
 */
public JSONObject listWorkflows(
    ListWorkflowsOptions options
) throws IOException, APIException, MissingAccessTokenException {
    // Options (possibly null) are encoded as query parameters.
    HttpUriRequest request = this.createAuthorizedRequest("GET", "/workflows", getQueryParameters(options));
    return new JSONObject(this.executeRequest(request));
}
/**
* List workflows, calls the GET /workflows endpoint.
*
* @return Workflows response from REST API
* @throws IOException General IOException
* @throws APIException Raised when API returns an erroneous status code
* @throws MissingAccessTokenException Raised if access token cannot be obtained
*/
public JSONObject listWorkflows() throws IOException, APIException, MissingAccessTokenException {
return this.listWorkflows(null);
}
/**
* Get workflow, calls the GET /workflows/{workflowId} endpoint.
*
* @param workflowId Id of the workflow
* @return Workflow response from REST API
* @throws IOException General IOException
* @throws APIException Raised when API returns an erroneous status code
* @throws MissingAccessTokenException Raised if access token cannot be obtained
*/
public JSONObject getWorkflow(String workflowId) throws IOException, APIException, MissingAccessTokenException {
HttpUriRequest request = this.createAuthorizedRequest("GET", "/workflows/" + workflowId);
String response = this.executeRequest(request);
return new JSONObject(response);
}
/**
* Update a workflow, calls the PATCH /workflows/{workflowId} endpoint.
*
* @see UpdateWorkflowOptions
* @param workflowId Id of the workflow
* @param options Additional options to include in request body
* @return Workflow response from REST API
* @throws IOException General IOException
* @throws APIException Raised when API returns an erroneous status code
* @throws MissingAccessTokenException Raised if access token cannot be obtained
*/
public JSONObject updateWorkflow(
String workflowId,
UpdateWorkflowOptions options
) throws IOException, APIException, MissingAccessTokenException {
String path = "/workflows/" + workflowId;
JSONObject body = new JSONObject();
this.addOptions(body, options);
HttpUriRequest request = this.createAuthorizedRequest("PATCH", path, body);
String jsonResponse = this.executeRequest(request);
return new JSONObject(jsonResponse);
}
/**
* Delete a workflow, calls the DELETE /workflows/{workflowId} endpoint.
*
* @see Client#createWorkflow
* @param workflowId Id of the workflow
* @return Workflow response from REST API
* @throws IOException General IOException
* @throws APIException Raised when API returns an erroneous status code
* @throws MissingAccessTokenException Raised if access token cannot be obtained
*/
public JSONObject deleteWorkflow(String workflowId) throws IOException, APIException, MissingAccessTokenException {
String path = "/workflows/" + workflowId;
HttpUriRequest request = this.createAuthorizedRequest("DELETE", path);
String response = this.executeRequest(request);
return new JSONObject(response);
}
/**
* Start a workflow execution, calls the POST /workflows/{workflowId}/executions endpoint.
*
* @param workflowId Id of the workflow
* @param content Input to the first step of the workflow
* @return WorkflowExecution response from REST API
* @throws IOException General IOException
* @throws APIException Raised when API returns an erroneous status code
* @throws MissingAccessTokenException Raised if access token cannot be obtained
*/
public JSONObject executeWorkflow(
String workflowId,
JSONObject content
) throws IOException, APIException, MissingAccessTokenException {
String path = "/workflows/" + workflowId + "/executions";
HttpUriRequest request = this.createAuthorizedRequest("POST", path, content);
String jsonResponse = this.executeRequest(request);
return new JSONObject(jsonResponse);
}
/**
* List executions in a workflow, calls the GET /workflows/{workflowId}/executions endpoint.
*
* @see ListWorkflowExecutionsOptions
* @param workflowId Id of the workflow
* @param options Additional options to pass along as query parameters
* @return Workflow executions response from REST API
* @throws IOException General IOException
* @throws APIException Raised when API returns an erroneous status code
* @throws MissingAccessTokenException Raised if access token cannot be obtained
*/
public JSONObject listWorkflowExecutions(
String workflowId,
ListWorkflowExecutionsOptions options
) throws IOException, APIException, MissingAccessTokenException {
List<NameValuePair> queryParameters = getQueryParameters(options);
String path = "/workflows/" + workflowId + "/executions";
HttpUriRequest request = this.createAuthorizedRequest("GET", path, queryParameters);
String response = this.executeRequest(request);
return new JSONObject(response);
}
/**
* List executions in a workflow, calls the GET /workflows/{workflowId}/executions endpoint.
*
* @param workflowId Id of the workflow
* @return Workflow executions response from REST API
* @throws IOException General IOException
* @throws APIException Raised when API returns an erroneous status code
* @throws MissingAccessTokenException Raised if access token cannot be obtained
*/
public JSONObject listWorkflowExecutions(
String workflowId
) throws IOException, APIException, MissingAccessTokenException {
return this.listWorkflowExecutions(workflowId, null);
}
/**
* Delete execution from workflow,
* calls the DELETE /workflows/{workflowId}/executions/{executionId} endpoint.
*
* @see Client#executeWorkflow
* @param workflowId Id of the workflow
* @param executionId Id of the execution
* @return WorkflowExecution response from REST API
* @throws IOException General IOException
* @throws APIException Raised when API returns an erroneous status code
* @throws MissingAccessTokenException Raised if access token cannot be obtained
*/
public JSONObject deleteWorkflowExecution(
String workflowId,
String executionId
) throws IOException, APIException, MissingAccessTokenException {
String path = "/workflows/" + workflowId + "/executions/" + executionId;
HttpUriRequest request = this.createAuthorizedRequest("DELETE", path);
String response = this.executeRequest(request);
return new JSONObject(response);
}
    /**
     * Executes the given request and returns the response body as a string.
     *
     * @param request Fully constructed, authorized request
     * @return Response body; for 204 No Content a small synthetic JSON string is returned
     * @throws IOException If the request could not be executed
     * @throws APIException If the API responds with an error status code
     */
    private String executeRequest(HttpUriRequest request) throws IOException, APIException {
        HttpResponse httpResponse = this.httpClient.execute(request);
        HttpEntity responseEntity = httpResponse.getEntity();
        StatusLine statusLine = httpResponse.getStatusLine();
        int status = statusLine.getStatusCode();
        // 204 carries no body; synthesize a JSON payload so callers can still parse the result.
        if (status == HttpStatus.SC_NO_CONTENT) {
            JSONObject response = new JSONObject();
            response.put("Your request executed successfully", String.valueOf(status));
            return response.toString();
        }
        else if (status == HttpStatus.SC_FORBIDDEN) {
            throw new APIException("Credentials provided are not valid");
        }
        // 429 Too Many Requests: the API rate limit was reached.
        else if (status == 429) {
            throw new APIException("You have reached the limit of requests per second");
        }
        // Any other non-2xx status is surfaced as an APIException.
        else if (status > 299) {
            try {
                // Prefer the API's own error message when the body is valid JSON with a "message" field.
                String message = EntityUtils.toString(responseEntity);
                JSONObject jsonResponse = new JSONObject(message);
                throw new APIException(status, jsonResponse.getString("message"));
            } catch (JSONException ex) {
                // Fall back to the HTTP reason phrase when the body is not parseable JSON.
                throw new APIException(status, statusLine.getReasonPhrase());
            }
        }
        return EntityUtils.toString(responseEntity);
    }
private URI createUri(String path) throws URISyntaxException {
String apiEndpoint = this.credentials.getApiEndpoint();
return new URI(apiEndpoint + path);
}
private URI createUri(String path, List<NameValuePair> queryParams) throws URISyntaxException {
URI uri;
String apiEndpoint = this.credentials.getApiEndpoint();
uri = new URI(apiEndpoint + path);
URIBuilder builder = new URIBuilder(uri);
builder.addParameters(queryParams);
return builder.build();
}
private HttpUriRequest createAuthorizedRequest(String method, String path) throws MissingAccessTokenException {
URI uri;
try {
uri = this.createUri(path);
} catch (URISyntaxException ex) {
ex.printStackTrace();
throw new RuntimeException("Failed to create url");
}
HttpUriRequest request;
switch (method) {
case "GET": {
request = new HttpGet(uri);
} break;
case "DELETE": {
request = new HttpDelete(uri);
} break;
default: throw new IllegalArgumentException("HTTP verb not supported: " + method);
}
request.addHeader("Content-Type", "application/json");
request.addHeader("Authorization", "Bearer " + this.credentials.getAccessToken(this.httpClient));
return request;
}
private HttpUriRequest createAuthorizedRequest(
String method,
String path,
List<NameValuePair> queryParams
) throws MissingAccessTokenException {
URI uri;
try {
uri = this.createUri(path, queryParams);
} catch (URISyntaxException ex) {
ex.printStackTrace();
throw new RuntimeException("Failed to create url");
}
HttpUriRequest request;
switch (method) {
case "GET": {
request = new HttpGet(uri);
} break;
case "DELETE": {
request = new HttpDelete(uri);
} break;
default: throw new IllegalArgumentException("HTTP verb not supported: " + method);
}
request.addHeader("Content-Type", "application/json");
request.addHeader("Authorization", "Bearer " + this.credentials.getAccessToken(this.httpClient));
return request;
}
private HttpUriRequest createAuthorizedRequest(
String method,
String path,
JSONObject jsonBody
) throws MissingAccessTokenException {
URI uri;
try {
uri = this.createUri(path);
} catch (URISyntaxException ex) {
ex.printStackTrace();
throw new RuntimeException("Failed to create url");
}
HttpUriRequest request;
byte[] body = null;
switch (method) {
case "GET": {
request = new HttpGet(uri);
} break;
case "DELETE": {
request = new HttpDelete(uri);
} break;
case "PATCH": {
request = new HttpPatch(uri);
body = jsonBody.toString().getBytes();
ByteArrayEntity entity = new ByteArrayEntity(body);
((HttpPatch) request).setEntity(entity);
} break;
case "POST": {
request = new HttpPost(uri);
body = jsonBody.toString().getBytes();
ByteArrayEntity entity = new ByteArrayEntity(body);
((HttpPost) request).setEntity(entity);
} break;
default: throw new IllegalArgumentException("HTTP verb not supported: " + method);
}
request.addHeader("Content-Type", "application/json");
request.addHeader("Authorization", "Bearer " + this.credentials.getAccessToken(this.httpClient));
return request;
}
private JSONObject addOptions(JSONObject body, Options options) {
if (options != null) {
body = options.addOptions(body);
}
return body;
}
private List<NameValuePair> getQueryParameters(ListResourcesOptions options) {
List<NameValuePair> parameters = new ArrayList<NameValuePair>();
if (options != null) {
parameters = options.addOptions(parameters);
}
return parameters;
}
private List<NameValuePair> getQueryParameters(DeleteResourcesOptions options) {
List<NameValuePair> parameters = new ArrayList<NameValuePair>();
if (options != null) {
parameters = options.addOptions(parameters);
}
return parameters;
}
}
|
0
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las/sdk/ContentType.java
|
package ai.lucidtech.las.sdk;
/**
 * MIME content types supported for document uploads.
 */
public enum ContentType {
    JPEG("image/jpeg"),
    PDF("application/pdf"),
    TIFF("image/tiff"),
    PNG("image/png");

    // Backing MIME type string for this constant.
    private String mime;

    ContentType(String mime) {
        this.mime = mime;
    }

    /** @return The MIME type string backing this constant, e.g. "application/pdf". */
    public String getMimeType() {
        return this.mime;
    }

    /**
     * Looks up the constant matching the given MIME type string.
     *
     * @param mimeType MIME type, e.g. "image/png"
     * @return Matching constant
     * @throws IllegalArgumentException When no constant matches
     */
    public static ContentType fromString(String mimeType) {
        for (ContentType candidate : values()) {
            if (candidate.getMimeType().equals(mimeType)) {
                return candidate;
            }
        }
        throw new IllegalArgumentException("No enum matching mime type: " + mimeType);
    }
}
|
0
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las/sdk/CreateAppClientOptions.java
|
package ai.lucidtech.las.sdk;
import org.json.JSONObject;
/**
 * Optional parameters for creating an app client.
 * All setters return this instance to allow call chaining.
 */
public class CreateAppClientOptions extends NameAndDescriptionOptions<CreateAppClientOptions> {
    private String[] callbackUrls;
    private String[] logoutUrls;
    private String[] loginUrls;
    private String defaultLoginUrl;
    private Boolean generateSecret;

    /** Sets the allowed callback urls. */
    public CreateAppClientOptions setCallbackUrls(String[] urls) {
        this.callbackUrls = urls;
        return this;
    }

    /** Sets the allowed logout urls. */
    public CreateAppClientOptions setLogoutUrls(String[] urls) {
        this.logoutUrls = urls;
        return this;
    }

    /** Sets the allowed login urls. */
    public CreateAppClientOptions setLoginUrls(String[] urls) {
        this.loginUrls = urls;
        return this;
    }

    /** Sets the default login url. */
    public CreateAppClientOptions setDefaultLoginUrl(String url) {
        this.defaultLoginUrl = url;
        return this;
    }

    /** Controls whether a client secret should be generated. */
    public CreateAppClientOptions setGenerateSecret(Boolean generate) {
        this.generateSecret = generate;
        return this;
    }

    /** Serializes all non-null options, then the shared name/description fields. */
    public JSONObject addOptions(JSONObject body) {
        this.addOption(body, "callbackUrls", this.callbackUrls);
        this.addOption(body, "logoutUrls", this.logoutUrls);
        this.addOption(body, "loginUrls", this.loginUrls);
        this.addOption(body, "defaultLoginUrl", this.defaultLoginUrl);
        this.addOption(body, "generateSecret", this.generateSecret);
        return super.addOptions(body);
    }
}
|
0
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las/sdk/CreateAssetOptions.java
|
package ai.lucidtech.las.sdk;
public class CreateAssetOptions extends NameAndDescriptionOptions<CreateAssetOptions> {}
|
0
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las/sdk/CreateDocumentOptions.java
|
package ai.lucidtech.las.sdk;
import org.json.JSONArray;
import org.json.JSONObject;
/**
 * Optional parameters for creating a document.
 * All setters return this instance to allow call chaining.
 */
public class CreateDocumentOptions extends Options {
    private String consentId;
    private String datasetId;
    private JSONArray groundTruth;
    private Integer retentionInDays;

    /** Sets the consent id to associate with the document. */
    public CreateDocumentOptions setConsentId(String id) {
        this.consentId = id;
        return this;
    }

    /** Sets the dataset the document belongs to. */
    public CreateDocumentOptions setDatasetId(String id) {
        this.datasetId = id;
        return this;
    }

    /** Sets the ground truth annotations for the document. */
    public CreateDocumentOptions setGroundTruth(JSONArray truth) {
        this.groundTruth = truth;
        return this;
    }

    /** Sets how many days the document should be retained. */
    public CreateDocumentOptions setRetentionInDays(Integer days) {
        this.retentionInDays = days;
        return this;
    }

    /** Serializes all non-null options into the request body. */
    public JSONObject addOptions(JSONObject body) {
        this.addOption(body, "consentId", this.consentId);
        this.addOption(body, "datasetId", this.datasetId);
        this.addOption(body, "groundTruth", this.groundTruth);
        this.addOption(body, "retentionInDays", this.retentionInDays);
        return body;
    }
}
|
0
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las/sdk/CreateModelOptions.java
|
package ai.lucidtech.las.sdk;
import org.json.JSONObject;
/**
 * Optional parameters for creating a model.
 */
public class CreateModelOptions extends NameAndDescriptionOptions<CreateModelOptions> {
    private PreprocessConfig preprocessConfig;

    /** Sets the preprocessing configuration for the model. */
    public CreateModelOptions setPreprocessConfig(PreprocessConfig config) {
        this.preprocessConfig = config;
        return this;
    }

    /** Serializes the preprocess config, then the shared name/description fields. */
    public JSONObject addOptions(JSONObject body) {
        this.addOption(body, "preprocessConfig", this.preprocessConfig);
        return super.addOptions(body);
    }
}
|
0
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las/sdk/CreatePredictionOptions.java
|
package ai.lucidtech.las.sdk;
import org.json.JSONObject;
/**
 * Optional parameters for creating a prediction.
 * All setters return this instance to allow call chaining.
 */
public class CreatePredictionOptions extends Options {
    private Integer maxPages;
    private Boolean autoRotate;
    private ImageQuality imageQuality;

    /** Sets the maximum number of pages to run predictions on. */
    public CreatePredictionOptions setMaxPages(int maxPages) {
        this.maxPages = maxPages;
        return this;
    }

    /** Enables or disables automatic rotation of the input document. */
    public CreatePredictionOptions setAutoRotate(boolean autoRotate) {
        this.autoRotate = autoRotate;
        return this;
    }

    /** Sets the image quality used when preprocessing the document. */
    public CreatePredictionOptions setImageQuality(ImageQuality imageQuality) {
        this.imageQuality = imageQuality;
        return this;
    }

    /**
     * Serializes all non-null options into the request body.
     * Fix: previously this dereferenced imageQuality unconditionally, throwing a
     * NullPointerException whenever setImageQuality had not been called; it is now
     * null-guarded like the other optional fields.
     */
    public JSONObject addOptions(JSONObject body) {
        this.addOption(body, "maxPages", this.maxPages);
        this.addOption(body, "autoRotate", this.autoRotate);
        if (this.imageQuality != null) {
            this.addOption(body, "imageQuality", this.imageQuality.value);
        }
        return body;
    }
}
|
0
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las/sdk/CreateSecretOptions.java
|
package ai.lucidtech.las.sdk;
public class CreateSecretOptions extends NameAndDescriptionOptions<CreateSecretOptions> {}
|
0
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las/sdk/CreateTransitionOptions.java
|
package ai.lucidtech.las.sdk;
import org.json.JSONObject;
/**
 * Optional parameters for creating a transition.
 * All setters return this instance to allow call chaining.
 */
public class CreateTransitionOptions extends NameAndDescriptionOptions<CreateTransitionOptions> {
    private TransitionParameters parameters;
    private JSONObject inputJsonSchema;
    private JSONObject outputJsonSchema;

    /** Sets the transition's execution parameters. */
    public CreateTransitionOptions setParameters(TransitionParameters params) {
        this.parameters = params;
        return this;
    }

    /** Sets the JSON schema validating the transition's input. */
    public CreateTransitionOptions setInputJsonSchema(JSONObject schema) {
        this.inputJsonSchema = schema;
        return this;
    }

    /** Sets the JSON schema validating the transition's output. */
    public CreateTransitionOptions setOutputJsonSchema(JSONObject schema) {
        this.outputJsonSchema = schema;
        return this;
    }

    /** Serializes all non-null options, then the shared name/description fields. */
    public JSONObject addOptions(JSONObject body) {
        this.addOption(body, "parameters", this.parameters);
        this.addOption(body, "inputJsonSchema", this.inputJsonSchema);
        this.addOption(body, "outputJsonSchema", this.outputJsonSchema);
        return super.addOptions(body);
    }
}
|
0
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las/sdk/CreateUserOptions.java
|
package ai.lucidtech.las.sdk;
public class CreateUserOptions extends UserOptions<CreateUserOptions> {}
|
0
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las/sdk/CreateWorkflowOptions.java
|
package ai.lucidtech.las.sdk;
import org.json.JSONObject;
/**
 * Optional parameters for creating a workflow.
 * All setters return this instance to allow call chaining.
 */
public class CreateWorkflowOptions extends NameAndDescriptionOptions<CreateWorkflowOptions> {
    private WorkflowCompletedConfig completedConfig;
    private WorkflowErrorConfig errorConfig;

    /** Sets the configuration run when the workflow completes. */
    public CreateWorkflowOptions setCompletedConfig(WorkflowCompletedConfig config) {
        this.completedConfig = config;
        return this;
    }

    /** Sets the configuration run when the workflow errors. */
    public CreateWorkflowOptions setErrorConfig(WorkflowErrorConfig config) {
        this.errorConfig = config;
        return this;
    }

    /** Serializes all non-null options, then the shared name/description fields. */
    public JSONObject addOptions(JSONObject body) {
        this.addOption(body, "completedConfig", this.completedConfig);
        this.addOption(body, "errorConfig", this.errorConfig);
        return super.addOptions(body);
    }
}
|
0
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las/sdk/Credentials.java
|
package ai.lucidtech.las.sdk;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.time.Instant;
import java.util.Base64;
import org.apache.http.HttpResponse;
import org.apache.http.HttpEntity;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.util.EntityUtils;
import org.json.JSONObject;
/**
 * Holds API client credentials and lazily fetches and caches an OAuth2 access
 * token via the client-credentials grant against the auth endpoint.
 */
public class Credentials {
    private String clientId;
    private String clientSecret;
    private String authEndpoint;
    private String apiEndpoint;
    // Cached bearer token; null until first fetched via getAccessToken.
    private String accessToken;
    // Epoch second at which the cached token expires; 0 forces an initial fetch.
    private long expires;
    /**
     * Used to fetch and store credentials.
     *
     * @param clientId Client id
     * @param clientSecret Client secret
     * @param authEndpoint Auth endpoint
     * @param apiEndpoint Domain endpoint of the api, e.g. https://{prefix}.api.lucidtech.ai/{version}
     * @throws MissingCredentialsException Raised if some of credentials are missing
     */
    public Credentials(
        String clientId,
        String clientSecret,
        String authEndpoint,
        String apiEndpoint
    ) throws MissingCredentialsException {
        this.validateCredentials(clientId, clientSecret, authEndpoint, apiEndpoint);
        this.clientId = clientId;
        this.clientSecret = clientSecret;
        this.authEndpoint = authEndpoint;
        this.apiEndpoint = apiEndpoint;
        this.accessToken = null;
        this.expires = 0;
    }
    /**
     * Returns the cached access token, refreshing it first when missing or expired.
     *
     * @param httpClient Instance of HttpClient used to access the authentication endpoint
     * @return Access token, downloading it if necessary
     * @throws MissingAccessTokenException Raised if access token cannot be obtained
     */
    public String getAccessToken(HttpClient httpClient) throws MissingAccessTokenException {
        // Refresh when no token is cached or the cached one has expired.
        // NOTE(review): no clock-skew margin is applied, so a token expiring within
        // the next second may still be handed out — consider refreshing slightly early.
        if (this.accessToken == null || this.accessToken.isEmpty() || this.expires < Instant.now().getEpochSecond()) {
            try {
                JSONObject tokenData = this.getClientCredentials(httpClient);
                this.accessToken = tokenData.getString("access_token");
                this.expires = Instant.now().getEpochSecond() + tokenData.getInt("expires_in");
            } catch (IOException | RuntimeException ex) {
                // The underlying cause is discarded here; only the custom exception surfaces.
                throw new MissingAccessTokenException();
            }
        }
        return this.accessToken;
    }
    public String getApiEndpoint() {
        return apiEndpoint;
    }
    // Rejects construction when any credential component is null.
    private void validateCredentials(String ...credentials) throws MissingCredentialsException {
        for (String value : credentials) {
            if (value == null) {
                throw new MissingCredentialsException();
            }
        }
    }
    /**
     * Performs the OAuth2 client-credentials token request.
     *
     * @param httpClient Client used to execute the token request
     * @return Parsed token response containing "access_token" and "expires_in"
     * @throws IOException If the HTTP request itself fails
     */
    private JSONObject getClientCredentials(HttpClient httpClient) throws IOException {
        HttpUriRequest request = new HttpPost("https://" + this.authEndpoint + "/oauth2/token?grant_type=client_credentials");
        request.addHeader("Content-Type", "application/x-www-form-urlencoded");
        request.addHeader("Accept", "application/json");
        // HTTP Basic auth: base64("clientId:clientSecret").
        String authString = this.clientId + ":" + this.clientSecret;
        String encodedAuth = Base64.getEncoder().encodeToString(authString.getBytes(StandardCharsets.UTF_8));
        request.addHeader("Authorization", "Basic " + encodedAuth);
        HttpResponse response = httpClient.execute(request);
        HttpEntity responseEntity = response.getEntity();
        int status = response.getStatusLine().getStatusCode();
        // Non-200 and malformed payloads are thrown as RuntimeException and mapped
        // to MissingAccessTokenException by the caller (getAccessToken).
        if (status != 200) {
            throw new RuntimeException("Failed to fetch access token: HTTP response code " + status);
        }
        String body = EntityUtils.toString(responseEntity);
        JSONObject jsonResponse = new JSONObject(body);
        if (!jsonResponse.has("access_token") || !jsonResponse.has("expires_in")) {
            throw new RuntimeException("Failed to fetch access token: invalid response body");
        }
        return jsonResponse;
    }
}
|
0
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las/sdk/DeleteDocumentsOptions.java
|
package ai.lucidtech.las.sdk;
import org.apache.http.NameValuePair;
import java.util.ArrayList;
import java.util.List;
/**
 * Options for batch-deleting documents, filterable by dataset and consent ids.
 */
public class DeleteDocumentsOptions extends DeleteResourcesOptions<DeleteDocumentsOptions> {
    private String[] datasetId;
    private String[] consentId;

    /** Restricts deletion to documents with any of the given consent ids. */
    public DeleteDocumentsOptions setConsentId(String[] ids) {
        this.consentId = ids;
        return this;
    }

    /** Restricts deletion to documents in any of the given datasets. */
    public DeleteDocumentsOptions setDatasetId(String[] ids) {
        this.datasetId = ids;
        return this;
    }

    /** Serializes the filters, then the shared pagination options. */
    public List<NameValuePair> addOptions(List<NameValuePair> parameters) {
        this.addOption(parameters, "datasetId", this.datasetId);
        this.addOption(parameters, "consentId", this.consentId);
        return super.addOptions(parameters);
    }
}
|
0
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las
|
java-sources/ai/lucidtech/las-sdk-java/4.0.0/ai/lucidtech/las/sdk/DeleteResourcesOptions.java
|
package ai.lucidtech.las.sdk;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.NameValuePair;
import java.util.ArrayList;
import java.util.List;
/**
 * Base class for options passed as query parameters to batch-delete endpoints.
 * Provides pagination fields shared by all subclasses.
 *
 * @param <T> Concrete subclass type, returned from setters to allow chaining
 */
public class DeleteResourcesOptions<T> {
    protected Integer maxResults;
    protected String nextToken;

    /** Sets the maximum number of resources to delete in one call. */
    public T setMaxResults(int limit) {
        this.maxResults = limit;
        return (T) this;
    }

    /** Sets the pagination token obtained from a previous response. */
    public T setNextToken(String token) {
        this.nextToken = token;
        return (T) this;
    }

    /** Appends a single string-valued parameter, skipping null values. */
    protected void addOption(List<NameValuePair> parameters, String key, String value) {
        if (value == null) {
            return;
        }
        parameters.add(new BasicNameValuePair(key, value));
    }

    /** Appends one parameter entry per array element, skipping a null array. */
    protected void addOption(List<NameValuePair> parameters, String key, String[] value) {
        if (value == null) {
            return;
        }
        for (String item : value) {
            parameters.add(new BasicNameValuePair(key, item));
        }
    }

    /** Appends a single integer-valued parameter, skipping null values. */
    protected void addOption(List<NameValuePair> parameters, String key, Integer value) {
        if (value == null) {
            return;
        }
        parameters.add(new BasicNameValuePair(key, Integer.toString(value)));
    }

    /** Serializes the pagination options into the parameter list. */
    public List<NameValuePair> addOptions(List<NameValuePair> parameters) {
        this.addOption(parameters, "maxResults", this.maxResults);
        this.addOption(parameters, "nextToken", this.nextToken);
        return parameters;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.