java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/problemtransformers/GraphSearchProblemInputToGraphSearchWithSubpathEvaluationInputTransformer.java
package ai.libs.jaicore.search.problemtransformers;

import org.api4.java.ai.graphsearch.problem.IPathSearchInput;
import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.IPathEvaluator;

import ai.libs.jaicore.basic.algorithm.reduction.AlgorithmicProblemReduction;
import ai.libs.jaicore.search.model.other.EvaluatedSearchGraphPath;
import ai.libs.jaicore.search.probleminputs.GraphSearchWithSubpathEvaluationsInput;

public abstract class GraphSearchProblemInputToGraphSearchWithSubpathEvaluationInputTransformer<N, A, V extends Comparable<V>>
        implements AlgorithmicProblemReduction<IPathSearchInput<N, A>, EvaluatedSearchGraphPath<N, A, V>, GraphSearchWithSubpathEvaluationsInput<N, A, V>, EvaluatedSearchGraphPath<N, A, V>> {

    private IPathEvaluator<N, A, V> nodeEvaluator;

    public GraphSearchProblemInputToGraphSearchWithSubpathEvaluationInputTransformer() {
        super();
    }

    public GraphSearchProblemInputToGraphSearchWithSubpathEvaluationInputTransformer(final IPathEvaluator<N, A, V> nodeEvaluator) {
        super();
        this.nodeEvaluator = nodeEvaluator;
    }

    @Override
    public GraphSearchWithSubpathEvaluationsInput<N, A, V> encodeProblem(final IPathSearchInput<N, A> problem) {
        if (this.nodeEvaluator == null) {
            throw new IllegalStateException("Cannot create problem since node evaluator has not been set yet.");
        }
        return new GraphSearchWithSubpathEvaluationsInput<>(problem, this.nodeEvaluator);
    }

    public GraphSearchProblemInputToGraphSearchWithSubpathEvaluationInputTransformer<N, A, V> setNodeEvaluator(final IPathEvaluator<N, A, V> nodeEvaluator) {
        this.nodeEvaluator = nodeEvaluator;
        return this;
    }

    public IPathEvaluator<N, A, V> getNodeEvaluator() {
        return this.nodeEvaluator;
    }

    @Override
    public EvaluatedSearchGraphPath<N, A, V> decodeSolution(final EvaluatedSearchGraphPath<N, A, V> solution) {
        return solution;
    }
}
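A minimal usage sketch, not part of the repository sources: the transformer is abstract but has no abstract methods, so an anonymous subclass suffices. The search input is assumed to be given; imports are as in the class above.

// Hypothetical usage sketch; the searchInput argument is assumed to be obtained elsewhere.
public static GraphSearchWithSubpathEvaluationsInput<String, String, Double> reduceWithConstantEvaluator(final IPathSearchInput<String, String> searchInput) {
    IPathEvaluator<String, String, Double> constantEvaluator = p -> 0.0; // toy node evaluator for illustration
    return new GraphSearchProblemInputToGraphSearchWithSubpathEvaluationInputTransformer<String, String, Double>() { }
            .setNodeEvaluator(constantEvaluator)
            .encodeProblem(searchInput);
}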
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/problemtransformers/GraphSearchProblemInputToGraphSearchWithSubpathEvaluationInputTransformerViaRDFS.java
package ai.libs.jaicore.search.problemtransformers;

import java.util.Random;
import java.util.function.Predicate;

import org.api4.java.ai.graphsearch.problem.IPathSearchInput;
import org.api4.java.ai.graphsearch.problem.IPathSearchWithPathEvaluationsInput;
import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.IPathEvaluator;

import ai.libs.jaicore.search.algorithms.standard.bestfirst.nodeevaluation.AlternativeNodeEvaluator;
import ai.libs.jaicore.search.algorithms.standard.bestfirst.nodeevaluation.CoveringNodeEvaluator;
import ai.libs.jaicore.search.algorithms.standard.bestfirst.nodeevaluation.RandomCompletionBasedNodeEvaluator;
import ai.libs.jaicore.search.probleminputs.GraphSearchWithSubpathEvaluationsInput;

/**
 * Takes a path search problem and uses the path evaluator as the evaluator within the random-completion-based node evaluator.
 *
 * @author Felix Mohr
 *
 * @param <N> node type
 * @param <A> arc type
 * @param <V> type of node evaluations
 */
public class GraphSearchProblemInputToGraphSearchWithSubpathEvaluationInputTransformerViaRDFS<N, A, V extends Comparable<V>>
        extends GraphSearchProblemInputToGraphSearchWithSubpathEvaluationInputTransformer<N, A, V> {

    private final IPathEvaluator<N, A, V> preferredNodeEvaluator; // a path evaluator that is executed PRIOR to the random completion
    private final Predicate<N> prioritizedNodesInRandomCompletion; // the predicate passed to the RandomCompletionNodeEvaluator for preferred paths

    /* a path evaluator that is executed AFTER the random completion (and only if the preferred NE returned NULL), and whose result is actually returned.
     * If not NULL, the random completion is only used to probe solutions and propagate their score via the event bus. */
    private IPathEvaluator<N, A, V> coveringNodeEvaluator;

    private final Random random;
    private final int numSamples;
    private final int timeoutForSingleCompletionEvaluationInMS;
    private final int timeoutForNodeEvaluationInMS;

    /**
     * @param preferredNodeEvaluator Node evaluator that should be used prior to adopting random completions
     * @param prioritizedNodesInRandomCompletion Predicate that evaluates to true for nodes that should be preferred when drawing random completions
     * @param random Random source
     * @param numSamples Number of random completions
     * @param timeoutForSingleCompletionEvaluationInMS
     * @param timeoutForNodeEvaluationInMS
     */
    public GraphSearchProblemInputToGraphSearchWithSubpathEvaluationInputTransformerViaRDFS(final IPathEvaluator<N, A, V> preferredNodeEvaluator, final Predicate<N> prioritizedNodesInRandomCompletion,
            final Random random, final int numSamples, final int timeoutForSingleCompletionEvaluationInMS, final int timeoutForNodeEvaluationInMS) {
        super();
        if (numSamples <= 0) {
            throw new IllegalArgumentException("Sample size has been set to " + numSamples + " but must be strictly positive!");
        }
        this.preferredNodeEvaluator = preferredNodeEvaluator;
        this.prioritizedNodesInRandomCompletion = prioritizedNodesInRandomCompletion;
        this.random = random;
        this.numSamples = numSamples;
        this.timeoutForSingleCompletionEvaluationInMS = timeoutForSingleCompletionEvaluationInMS;
        this.timeoutForNodeEvaluationInMS = timeoutForNodeEvaluationInMS;
    }

    public IPathEvaluator<N, A, V> getPreferredNodeEvaluator() {
        return this.preferredNodeEvaluator;
    }

    public Predicate<N> getPrioritizedNodePredicatesForRandomCompletion() {
        return this.prioritizedNodesInRandomCompletion;
    }

    public int getNumSamples() {
        return this.numSamples;
    }

    @Override
    public GraphSearchWithSubpathEvaluationsInput<N, A, V> encodeProblem(final IPathSearchInput<N, A> problem) {
        if (!(problem instanceof IPathSearchWithPathEvaluationsInput)) {
            throw new IllegalArgumentException("Given problem must have path evaluation!");
        }
        IPathSearchWithPathEvaluationsInput<N, A, V> cProblem = (IPathSearchWithPathEvaluationsInput<N, A, V>) problem;
        IPathEvaluator<N, A, V> mainEvaluator;
        RandomCompletionBasedNodeEvaluator<N, A, V> rdfsNodeEvaluator = new RandomCompletionBasedNodeEvaluator<>(this.random, this.numSamples, this.numSamples * 2, cProblem.getPathEvaluator(),
                this.timeoutForSingleCompletionEvaluationInMS, this.timeoutForNodeEvaluationInMS, this.prioritizedNodesInRandomCompletion);

        /* first check whether we have a covering node evaluator */
        if (this.coveringNodeEvaluator != null) {
            mainEvaluator = new CoveringNodeEvaluator<>(rdfsNodeEvaluator, this.coveringNodeEvaluator);
        } else {
            mainEvaluator = rdfsNodeEvaluator;
        }

        /* now merge this main evaluator together with the preferred node evaluator */
        if (this.preferredNodeEvaluator != null) {
            this.setNodeEvaluator(new AlternativeNodeEvaluator<>(this.preferredNodeEvaluator, mainEvaluator));
        } else {
            this.setNodeEvaluator(mainEvaluator);
        }
        return super.encodeProblem(problem);
    }

    public IPathEvaluator<N, A, V> getCoveringNodeEvaluator() {
        return this.coveringNodeEvaluator;
    }

    public void setCoveringNodeEvaluator(final IPathEvaluator<N, A, V> coveringNodeEvaluator) {
        this.coveringNodeEvaluator = coveringNodeEvaluator;
    }
}
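A construction sketch for the RDFS-based reduction; all parameter values are illustrative assumptions, and the problem argument is a hypothetical instance with a path evaluator.

// Hypothetical usage sketch; all parameter values are illustrative only.
public static GraphSearchWithSubpathEvaluationsInput<String, String, Double> reduceViaRDFS(final IPathSearchWithPathEvaluationsInput<String, String, Double> problem) {
    GraphSearchProblemInputToGraphSearchWithSubpathEvaluationInputTransformerViaRDFS<String, String, Double> reduction =
            new GraphSearchProblemInputToGraphSearchWithSubpathEvaluationInputTransformerViaRDFS<>(
                    null,           // no preferred node evaluator
                    n -> false,     // no prioritized nodes for the random completions
                    new Random(42), // random source
                    3,              // number of random completions per node
                    1000,           // timeout for a single completion evaluation (ms)
                    5000);          // timeout for the whole node evaluation (ms)
    return reduction.encodeProblem(problem);
}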
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/problemtransformers/GraphSearchProblemInputToGraphSearchWithSubpathEvaluationViaUninformedness.java
package ai.libs.jaicore.search.problemtransformers;

import org.api4.java.ai.graphsearch.problem.IPathSearchInput;
import org.api4.java.ai.graphsearch.problem.IPathSearchWithPathEvaluationsInput;

import ai.libs.jaicore.search.probleminputs.GraphSearchWithSubpathEvaluationsInput;

/**
 * This reduction creates a problem in which each inner node is evaluated with 0.0 and each leaf node with the value given by the path evaluator.
 * The given path evaluator is, by contract, only applicable to leaf nodes, so it is used only there; everywhere else a constant 0.0 is used.
 *
 * @author Felix Mohr
 *
 * @param <N> node type
 * @param <A> arc type
 */
public class GraphSearchProblemInputToGraphSearchWithSubpathEvaluationViaUninformedness<N, A> extends GraphSearchProblemInputToGraphSearchWithSubpathEvaluationInputTransformer<N, A, Double> {

    @Override
    public GraphSearchWithSubpathEvaluationsInput<N, A, Double> encodeProblem(final IPathSearchInput<N, A> problem) {
        if (!(problem instanceof IPathSearchWithPathEvaluationsInput)) {
            throw new IllegalArgumentException("Given problem must be of type " + IPathSearchWithPathEvaluationsInput.class + " but is of " + problem.getClass());
        }
        final IPathSearchWithPathEvaluationsInput<N, A, Double> cProblem = (IPathSearchWithPathEvaluationsInput<N, A, Double>) problem;
        this.setNodeEvaluator(p -> cProblem.getGoalTester().isGoal(p) ? cProblem.getPathEvaluator().evaluate(p) : 0.0);
        return super.encodeProblem(problem);
    }
}
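A short sketch of the uninformed reduction; the problem argument is again an assumed instance.

// Hypothetical usage sketch; the problem argument is assumed to carry a (leaf-only) path evaluator.
public static GraphSearchWithSubpathEvaluationsInput<String, String, Double> reduceUninformed(final IPathSearchWithPathEvaluationsInput<String, String, Double> problem) {
    // inner nodes evaluate to 0.0; goal paths get the score of the original path evaluator
    return new GraphSearchProblemInputToGraphSearchWithSubpathEvaluationViaUninformedness<String, String>().encodeProblem(problem);
}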
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/problemtransformers/GraphSearchWithPathEvaluationsInputToGraphSearchWithSubpathEvaluationViaUninformedness.java
package ai.libs.jaicore.search.problemtransformers;

import org.api4.java.ai.graphsearch.problem.IPathSearchWithPathEvaluationsInput;
import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.IPathEvaluator;
import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.PathEvaluationException;
import org.api4.java.datastructure.graph.ILabeledPath;

import ai.libs.jaicore.basic.algorithm.reduction.AlgorithmicProblemReduction;
import ai.libs.jaicore.search.algorithms.standard.bestfirst.nodeevaluation.AlternativeNodeEvaluator;
import ai.libs.jaicore.search.model.other.EvaluatedSearchGraphPath;
import ai.libs.jaicore.search.model.other.SearchGraphPath;
import ai.libs.jaicore.search.probleminputs.GraphSearchWithSubpathEvaluationsInput;

public class GraphSearchWithPathEvaluationsInputToGraphSearchWithSubpathEvaluationViaUninformedness<N, A>
        implements AlgorithmicProblemReduction<IPathSearchWithPathEvaluationsInput<N, A, Double>, SearchGraphPath<N, A>, GraphSearchWithSubpathEvaluationsInput<N, A, Double>, EvaluatedSearchGraphPath<N, A, Double>> {

    @Override
    public GraphSearchWithSubpathEvaluationsInput<N, A, Double> encodeProblem(final IPathSearchWithPathEvaluationsInput<N, A, Double> problem) {
        IPathEvaluator<N, A, Double> evaluator = new AlternativeNodeEvaluator<>(new IPathEvaluator<N, A, Double>() {

            @Override
            public Double evaluate(final ILabeledPath<N, A> path) throws PathEvaluationException, InterruptedException {
                return problem.getGoalTester().isGoal(path) ? null : 0.0; // goal paths should be evaluated by the ground truth
            }
        }, problem.getPathEvaluator());
        return new GraphSearchWithSubpathEvaluationsInput<>(problem, evaluator);
    }

    @Override
    public SearchGraphPath<N, A> decodeSolution(final EvaluatedSearchGraphPath<N, A, Double> solution) {
        return solution;
    }
}
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/structure/graphgenerator/SubGraphGenerator.java
package ai.libs.jaicore.search.structure.graphgenerator;

import org.api4.java.datastructure.graph.implicit.IGraphGenerator;
import org.api4.java.datastructure.graph.implicit.ISingleRootGenerator;
import org.api4.java.datastructure.graph.implicit.ISuccessorGenerator;

/**
 * A graph generator that takes another graph generator and generates its sub-graph under a given root node.
 *
 * @author fmohr
 *
 * @param <N> node type
 * @param <A> arc type
 */
public class SubGraphGenerator<N, A> implements IGraphGenerator<N, A> {

    private final IGraphGenerator<N, A> actualGraphGenerator;
    private final N newRoot;

    public SubGraphGenerator(final IGraphGenerator<N, A> actualGraphGenerator, final N newRoot) {
        super();
        this.actualGraphGenerator = actualGraphGenerator;
        this.newRoot = newRoot;
    }

    @Override
    public ISingleRootGenerator<N> getRootGenerator() {
        return () -> this.newRoot;
    }

    @Override
    public ISuccessorGenerator<N, A> getSuccessorGenerator() {
        return this.actualGraphGenerator.getSuccessorGenerator();
    }
}
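A usage sketch: restricting an existing generator to the subtree rooted at a previously generated node; both arguments are assumed to be given.

// Hypothetical usage sketch; fullGenerator and subtreeRoot are assumed to be given.
public static <N, A> IGraphGenerator<N, A> restrictToSubtree(final IGraphGenerator<N, A> fullGenerator, final N subtreeRoot) {
    // the root generator of the result yields subtreeRoot; successor generation is delegated to fullGenerator
    return new SubGraphGenerator<>(fullGenerator, subtreeRoot);
}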
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/ISyntheticGraphGeneratorBuilder.java
package ai.libs.jaicore.search.syntheticgraphs;

import org.api4.java.datastructure.graph.implicit.IGraphGenerator;

import ai.libs.jaicore.search.syntheticgraphs.graphmodels.ITransparentTreeNode;

public interface ISyntheticGraphGeneratorBuilder {

    public IGraphGenerator<ITransparentTreeNode, Integer> build();
}
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/ISyntheticTreasureIslandProblem.java
package ai.libs.jaicore.search.syntheticgraphs;

import org.api4.java.ai.graphsearch.problem.IPathSearchWithPathEvaluationsInput;
import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.PathEvaluationException;
import org.api4.java.datastructure.graph.ILabeledPath;

import ai.libs.jaicore.search.syntheticgraphs.graphmodels.ITransparentTreeNode;
import ai.libs.jaicore.search.syntheticgraphs.islandmodels.IIslandModel;

public interface ISyntheticTreasureIslandProblem extends IPathSearchWithPathEvaluationsInput<ITransparentTreeNode, Integer, Double> {

    public IIslandModel getIslandModel();

    /* the following return int rather than BigInteger, because larger values could not be tested anyway */
    public int getExpectedNumberOfIslands();

    public int getMaximumIslandSizes();

    public int getMinimumIslandSizes();

    public int getNumberOfTreasureIslands();

    public boolean isPathATreasure(ILabeledPath<ITransparentTreeNode, Integer> path) throws PathEvaluationException, InterruptedException;
}
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/SyntheticSearchProblemBuilder.java
package ai.libs.jaicore.search.syntheticgraphs;

import org.api4.java.ai.graphsearch.problem.IPathSearchWithPathEvaluationsInput;
import org.api4.java.ai.graphsearch.problem.implicit.graphgenerator.INodeGoalTester;
import org.api4.java.datastructure.graph.implicit.IGraphGenerator;

import ai.libs.jaicore.search.probleminputs.GraphSearchWithPathEvaluationsInput;
import ai.libs.jaicore.search.syntheticgraphs.graphmodels.ITransparentTreeNode;
import ai.libs.jaicore.search.syntheticgraphs.treasuremodels.ITreasureModel;

public class SyntheticSearchProblemBuilder {

    private ISyntheticGraphGeneratorBuilder ggBuilder;
    private IGraphGenerator<ITransparentTreeNode, Integer> graphGenerator;
    private ITreasureModel treasureModel;

    public SyntheticSearchProblemBuilder withGGBuilder(final ISyntheticGraphGeneratorBuilder graphGeneratorBuilder) {
        this.ggBuilder = graphGeneratorBuilder;
        return this;
    }

    public SyntheticSearchProblemBuilder withGraphGenerator(final IGraphGenerator<ITransparentTreeNode, Integer> graphGenerator) {
        this.ggBuilder = null;
        this.graphGenerator = graphGenerator;
        return this;
    }

    public SyntheticSearchProblemBuilder withTreasureModel(final ITreasureModel treasureModel) {
        this.treasureModel = treasureModel;
        return this;
    }

    public IPathSearchWithPathEvaluationsInput<ITransparentTreeNode, Integer, Double> build() {
        if (this.ggBuilder != null) {
            this.graphGenerator = this.ggBuilder.build();
        }
        if (this.graphGenerator == null) {
            throw new IllegalStateException("Graph generator has not been set yet.");
        }
        if (this.treasureModel == null) {
            throw new IllegalStateException("Treasure model has not been set yet.");
        }
        return new GraphSearchWithPathEvaluationsInput<>(this.graphGenerator, new INodeGoalTester<ITransparentTreeNode, Integer>() {

            @Override
            public boolean isGoal(final ITransparentTreeNode node) {
                return !node.hasChildren();
            }
        }, this.treasureModel);
    }
}
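An assembly sketch using classes from this package; the parameter values are illustrative assumptions. DegeneratedGraphGeneratorGenerator implements ISyntheticGraphGeneratorBuilder and LinearTreasureModel is an ITreasureModel (both appear elsewhere in this dump), so they plug directly into the builder.

// Hypothetical usage sketch; parameter values are illustrative only.
public static IPathSearchWithPathEvaluationsInput<ITransparentTreeNode, Integer, Double> buildExampleProblem() {
    return new SyntheticSearchProblemBuilder()
            .withGGBuilder(new DegeneratedGraphGeneratorGenerator(new Random(0), 1, 4, 10)) // seed, dead ends per generation, branching factor, depth
            .withTreasureModel(new LinearTreasureModel())
            .build();
}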
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/experiments/ISyntheticSearchExperimentConfig.java
package ai.libs.jaicore.search.syntheticgraphs.experiments;

import java.util.List;

import org.aeonbits.owner.Config.Sources;

import ai.libs.jaicore.basic.IOwnerBasedRandomConfig;
import ai.libs.jaicore.db.IDatabaseConfig;
import ai.libs.jaicore.experiments.IExperimentSetConfig;
import ai.libs.jaicore.experiments.configurations.IAlgorithmMaxIterConfig;
import ai.libs.jaicore.experiments.configurations.IAlgorithmNameConfig;

@Sources({ "file:conf/synthetic-experiments.conf" })
public interface ISyntheticSearchExperimentConfig extends IExperimentSetConfig, IDatabaseConfig, IAlgorithmNameConfig, IAlgorithmMaxIterConfig, IOwnerBasedRandomConfig {

    public static final String K_BRANCHING = "branching";
    public static final String K_DEPTH = "depth";

    @Key(K_BRANCHING)
    public List<Integer> branchingFactors();

    @Key(K_DEPTH)
    public List<Integer> depths();
}
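A reading sketch via the owner library (ConfigCache is already used by other classes in this dump); values would come from conf/synthetic-experiments.conf if that file exists.

// Hypothetical usage sketch; imports as in the interface above plus org.aeonbits.owner.ConfigCache.
ISyntheticSearchExperimentConfig config = ConfigCache.getOrCreate(ISyntheticSearchExperimentConfig.class);
List<Integer> branchingFactors = config.branchingFactors(); // values of the "branching" key
List<Integer> depths = config.depths();                     // values of the "depth" key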
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/experiments/ITreasureIslandExperimentSetConfig.java
package ai.libs.jaicore.search.syntheticgraphs.experiments;

import java.util.List;

public interface ITreasureIslandExperimentSetConfig extends ISyntheticSearchExperimentConfig {

    public static final String K_ISLANDS_MAXISLANDSIZE = "maxislandsize";
    public static final String K_ISLANDS_NUMBER_OF_TREASURES = "treasures";
    public static final String K_TREASURE_MODEL = "treasuremodel";

    @Key(K_ISLANDS_MAXISLANDSIZE)
    public List<Double> maxIslandSize();

    @Key(K_ISLANDS_NUMBER_OF_TREASURES)
    public List<Integer> numbersOfTreasureIslands();

    @Key(K_TREASURE_MODEL)
    public List<String> functions();
}
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/experiments/SyntheticDomain.java
package ai.libs.jaicore.search.syntheticgraphs.experiments;

import org.aeonbits.owner.ConfigCache;

import ai.libs.jaicore.search.experiments.SearchExperimentDomain;
import ai.libs.jaicore.search.syntheticgraphs.ISyntheticTreasureIslandProblem;
import ai.libs.jaicore.search.syntheticgraphs.graphmodels.ITransparentTreeNode;

public class SyntheticDomain extends SearchExperimentDomain<TreasureIslandSearchExperimentBuilder, ISyntheticTreasureIslandProblem, ITransparentTreeNode, Integer> {

    public SyntheticDomain() {
        this(ConfigCache.getOrCreate(ISyntheticSearchExperimentConfig.class));
    }

    public SyntheticDomain(final ISyntheticSearchExperimentConfig config) {
        super(config, new SyntheticExperimentDecoder(config));
    }

    @Override
    public Class<TreasureIslandSearchExperimentBuilder> getBuilderClass() {
        return TreasureIslandSearchExperimentBuilder.class;
    }

    @Override
    public SyntheticExperimentDecoder getDecoder() {
        return (SyntheticExperimentDecoder) super.getDecoder();
    }
}
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/experiments/SyntheticExperimentDecoder.java
package ai.libs.jaicore.search.syntheticgraphs.experiments;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.MathContext;
import java.math.RoundingMode;
import java.util.Map;
import java.util.Random;

import org.api4.java.ai.graphsearch.problem.IOptimalPathInORGraphSearch;
import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.IEvaluatedPath;

import ai.libs.jaicore.experiments.Experiment;
import ai.libs.jaicore.search.experiments.ASearchExperimentDecoder;
import ai.libs.jaicore.search.experiments.StandardExperimentSearchAlgorithmFactory;
import ai.libs.jaicore.search.syntheticgraphs.ISyntheticTreasureIslandProblem;
import ai.libs.jaicore.search.syntheticgraphs.graphmodels.ITransparentTreeNode;
import ai.libs.jaicore.search.syntheticgraphs.graphmodels.degenerated.DegeneratedGraphSearchWithPathEvaluationsProblem;
import ai.libs.jaicore.search.syntheticgraphs.islandmodels.IIslandModel;
import ai.libs.jaicore.search.syntheticgraphs.islandmodels.equalsized.EqualSizedIslandsModel;
import ai.libs.jaicore.search.syntheticgraphs.treasuremodels.ITreasureModel;
import ai.libs.jaicore.search.syntheticgraphs.treasuremodels.LinearTreasureModel;
import ai.libs.jaicore.search.syntheticgraphs.treasuremodels.islands.funnel.AbyssTreasureModel;
import ai.libs.jaicore.search.syntheticgraphs.treasuremodels.islands.funnel.DominatedFunnelTreasureModel;
import ai.libs.jaicore.search.syntheticgraphs.treasuremodels.islands.funnel.FunnelTreasureModel;
import ai.libs.jaicore.search.syntheticgraphs.treasuremodels.islands.funnel.RelativeFunnelTreasureModel;
import ai.libs.jaicore.search.syntheticgraphs.treasuremodels.islands.noisymean.LinkedTreasureIslandPathCostGenerator;
import ai.libs.jaicore.search.syntheticgraphs.treasuremodels.islands.noisymean.NoisyMeanTreasureModel;
import ai.libs.jaicore.search.syntheticgraphs.treasuremodels.islands.noisymean.ShiftedSineTreasureGenerator;

public class SyntheticExperimentDecoder extends ASearchExperimentDecoder<ITransparentTreeNode, Integer, ISyntheticTreasureIslandProblem, IEvaluatedPath<ITransparentTreeNode, Integer, Double>,
        IOptimalPathInORGraphSearch<? extends ISyntheticTreasureIslandProblem, ? extends IEvaluatedPath<ITransparentTreeNode, Integer, Double>, ITransparentTreeNode, Integer, Double>> {

    public SyntheticExperimentDecoder(final ISyntheticSearchExperimentConfig config) {
        super(config);
    }

    @Override
    public ISyntheticTreasureIslandProblem getProblem(final Experiment experiment) {

        /* check validity of experiment */
        this.checkThatAllKeyFieldsInExperimentAreDefined(experiment);

        /* read experiment data */
        Map<String, String> experimentData = experiment.getValuesOfKeyFields();
        int seed = Integer.parseInt(experimentData.get(ISyntheticSearchExperimentConfig.K_SEED));
        int branchingFactor = Integer.parseInt(experimentData.get(ISyntheticSearchExperimentConfig.K_BRANCHING));
        int depth = Integer.parseInt(experimentData.get(ISyntheticSearchExperimentConfig.K_DEPTH));
        double maxIslandSizeRel = Double.parseDouble(experimentData.get(ITreasureIslandExperimentSetConfig.K_ISLANDS_MAXISLANDSIZE)); // parsed as double, since the config declares the island size relative to the number of leafs
        int numberOfIslandsWithTreasure = Integer.parseInt(experimentData.get(ITreasureIslandExperimentSetConfig.K_ISLANDS_NUMBER_OF_TREASURES));

        /* derive total number of leafs and the absolute maximum island size */
        BigInteger numberOfLeafs = BigInteger.valueOf(branchingFactor).pow(depth);
        BigInteger maxIslandSizeAbs = new BigDecimal(numberOfLeafs).multiply(BigDecimal.valueOf(maxIslandSizeRel)).round(new MathContext(1, RoundingMode.FLOOR)).toBigInteger();

        /* create graph search input */
        IIslandModel islandModel = new EqualSizedIslandsModel(maxIslandSizeAbs);
        ITreasureModel treasureGenerator = this.getTreasureModel(islandModel, numberOfIslandsWithTreasure, new Random(seed), experimentData.get(ITreasureIslandExperimentSetConfig.K_TREASURE_MODEL));
        return new DegeneratedGraphSearchWithPathEvaluationsProblem(new Random(seed), branchingFactor / 2, branchingFactor, depth, maxIslandSizeAbs.intValue(), numberOfIslandsWithTreasure, islandModel, treasureGenerator);
    }

    public ITreasureModel getTreasureModel(final IIslandModel islandModel, final int numberOfIslandsWithTreasure, final Random random, final String model) {
        switch (model) {
        case "abyss":
            return new AbyssTreasureModel(islandModel, numberOfIslandsWithTreasure, random);
        case "funnel":
            return new FunnelTreasureModel(islandModel, numberOfIslandsWithTreasure, random);
        case "relativefunnel":
            return new RelativeFunnelTreasureModel(islandModel, numberOfIslandsWithTreasure, random);
        case "dominatedfunnel":
            return new DominatedFunnelTreasureModel(islandModel, random);
        case "linear-asc":
            return new LinearTreasureModel();
        case "linear-desc":
            return new LinearTreasureModel(false);
        default:
            throw new IllegalArgumentException("Model " + model + " is not supported.");
        }
    }

    public NoisyMeanTreasureModel getTreasureGenerator(final String function, final IIslandModel islandModel, final int numberOfTreasures) {
        switch (function.toLowerCase()) {
        case "sine":
            ShiftedSineTreasureGenerator linkFunction = new ShiftedSineTreasureGenerator(islandModel, numberOfTreasures, 0.1, 0.5);
            return new LinkedTreasureIslandPathCostGenerator(islandModel, linkFunction);
        default:
            throw new UnsupportedOperationException();
        }
    }

    @Override
    public IOptimalPathInORGraphSearch<? extends ISyntheticTreasureIslandProblem, ? extends IEvaluatedPath<ITransparentTreeNode, Integer, Double>, ITransparentTreeNode, Integer, Double> getAlgorithm(final Experiment experiment) {
        StandardExperimentSearchAlgorithmFactory<ITransparentTreeNode, Integer, ISyntheticTreasureIslandProblem> algoFactory = new StandardExperimentSearchAlgorithmFactory<>();
        return algoFactory.getAlgorithm(experiment, this.getProblem(experiment));
    }
}
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/experiments/SyntheticSearchExperimentBuilder.java
package ai.libs.jaicore.search.syntheticgraphs.experiments;

import ai.libs.jaicore.experiments.AAlgorithmExperimentBuilder;

public abstract class SyntheticSearchExperimentBuilder<B extends SyntheticSearchExperimentBuilder<B>> extends AAlgorithmExperimentBuilder<B> {

    public SyntheticSearchExperimentBuilder(final ISyntheticSearchExperimentConfig config) {
        super(config);
    }

    public B withBF(final int branchingFactor) {
        this.set(ISyntheticSearchExperimentConfig.K_BRANCHING, branchingFactor);
        return this.getMe();
    }

    public B withDepth(final int depth) {
        this.set(ISyntheticSearchExperimentConfig.K_DEPTH, depth);
        return this.getMe();
    }
}
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/experiments/TreasureIslandSearchExperimentBuilder.java
package ai.libs.jaicore.search.syntheticgraphs.experiments;

import org.aeonbits.owner.ConfigCache;

import ai.libs.jaicore.basic.IOwnerBasedRandomConfig;

public class TreasureIslandSearchExperimentBuilder extends SyntheticSearchExperimentBuilder<TreasureIslandSearchExperimentBuilder> {

    public TreasureIslandSearchExperimentBuilder() {
        this(ConfigCache.getOrCreate(ISyntheticSearchExperimentConfig.class));
    }

    public TreasureIslandSearchExperimentBuilder(final ISyntheticSearchExperimentConfig config) {
        super(config);
    }

    public TreasureIslandSearchExperimentBuilder withSeed(final long seed) {
        this.set(IOwnerBasedRandomConfig.K_SEED, seed);
        return this;
    }

    public TreasureIslandSearchExperimentBuilder withMaxIslandSize(final double maxIslandSize) {
        this.set(ITreasureIslandExperimentSetConfig.K_ISLANDS_MAXISLANDSIZE, maxIslandSize);
        return this;
    }

    public TreasureIslandSearchExperimentBuilder withTreasures(final int numTreasures) {
        this.set(ITreasureIslandExperimentSetConfig.K_ISLANDS_NUMBER_OF_TREASURES, numTreasures);
        return this;
    }

    public TreasureIslandSearchExperimentBuilder withTreasureModel(final String treasureModel) {
        this.set(ITreasureIslandExperimentSetConfig.K_TREASURE_MODEL, treasureModel);
        return this;
    }

    @Override
    protected TreasureIslandSearchExperimentBuilder getMe() {
        return this;
    }
}
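A chaining sketch with illustrative values; "funnel" is one of the model names understood by SyntheticExperimentDecoder.

// Hypothetical usage sketch; all values are illustrative only.
TreasureIslandSearchExperimentBuilder builder = new TreasureIslandSearchExperimentBuilder()
        .withBF(2)
        .withDepth(10)
        .withSeed(42)
        .withMaxIslandSize(0.1)
        .withTreasures(1)
        .withTreasureModel("funnel");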
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/graphmodels/ITransparentTreeNode.java
package ai.libs.jaicore.search.syntheticgraphs.graphmodels;

import java.math.BigInteger;

public interface ITransparentTreeNode {

    public int getDepth();

    public BigInteger getNumberOfLeftRelativesInSameGeneration();

    public BigInteger getNumberOfRightRelativesInSameGeneration();

    public BigInteger getNumberOfSubtreesWithMaxNumberOfNodesPriorToThisNode(BigInteger maxNumberOfNodes);

    /**
     * Gets the number of leaf nodes of all sub-trees of maximum given size prior to the node on which it is invoked.
     *
     * Note that the leafs in the same sub-tree are not counted.
     * These can be obtained by computing the number of all leaf nodes prior to this one minus the result of this method.
     *
     * @param maxNumberOfNodes
     * @return
     */
    public BigInteger getNumberOfLeafsInSubtreesWithMaxNumberOfNodesPriorToThisNode(BigInteger maxNumberOfNodes);

    public BigInteger getNumberOfSubtreesWithMaxNumberOfNodes(BigInteger maxNumberOfNodes);

    public BigInteger getNumberOfLeafsPriorToNodeViaDFS();

    public BigInteger getNumberOfLeafsStemmingFromLeftRelativesInSameGeneration(); // siblings that are leafs should count as 1

    public BigInteger getNumberOfLeafsUnderNode();

    public BigInteger getNumberOfLeafsStemmingFromRightRelativesInSameGeneration(); // siblings that are leafs should count as 1

    public int getDistanceToShallowestLeafUnderNode();

    public int getDistanceToDeepestLeafUnderNode();

    public boolean hasChildren();
}
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/graphmodels/balanced/BalanceGraphSearchProblem.java
package ai.libs.jaicore.search.syntheticgraphs.graphmodels.balanced;

import org.api4.java.ai.graphsearch.problem.implicit.graphgenerator.INodeGoalTester;

import ai.libs.jaicore.search.probleminputs.GraphSearchInput;
import ai.libs.jaicore.search.syntheticgraphs.graphmodels.ITransparentTreeNode;

public class BalanceGraphSearchProblem extends GraphSearchInput<ITransparentTreeNode, Integer> {

    public BalanceGraphSearchProblem(final int branchingFactor, final int depth) {
        super(new BalancedGraphGeneratorGenerator(branchingFactor, depth).create(), new INodeGoalTester<ITransparentTreeNode, Integer>() {

            @Override
            public boolean isGoal(final ITransparentTreeNode node) {
                return node.getDepth() == depth;
            }
        });
    }
}
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/graphmodels/balanced/BalancedGraphGeneratorGenerator.java
package ai.libs.jaicore.search.syntheticgraphs.graphmodels.balanced;

import java.math.BigInteger;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.IntStream;

import org.api4.java.datastructure.graph.implicit.IGraphGenerator;
import org.api4.java.datastructure.graph.implicit.ILazySuccessorGenerator;
import org.api4.java.datastructure.graph.implicit.INewNodeDescription;
import org.api4.java.datastructure.graph.implicit.ISingleRootGenerator;

import ai.libs.jaicore.basic.MappingIterator;
import ai.libs.jaicore.search.model.NodeExpansionDescription;
import ai.libs.jaicore.search.syntheticgraphs.graphmodels.ITransparentTreeNode;

public class BalancedGraphGeneratorGenerator {

    private final int branchingFactor;
    private final int maxDepth;

    public static int getNumberOfLeafsUnderANonTerminalNodeInDepth(final int depthOfRequestedNode, final int branchingFactor, final int assumedDepthOfTree) {
        return (int) Math.pow(branchingFactor, assumedDepthOfTree - (double) depthOfRequestedNode);
    }

    public static BigInteger getNumberOfMaxSubtreesOfMaxLengthUnderNonTerminalNodeInDepth(final int depth, final BigInteger maxNumberOfNodes, final int branchingFactor, final int maxDepth) {
        if (depth >= maxDepth) {
            throw new IllegalArgumentException("A node in depth " + depth + " in a graph with max depth " + maxDepth + " cannot be an inner node!");
        }

        /* determine possible height */
        int height = 0;
        BigInteger numberOfNodesForHeight = BigInteger.ONE;
        while (numberOfNodesForHeight.compareTo(maxNumberOfNodes) <= 0 && height < maxDepth) {
            height++;
            numberOfNodesForHeight = BigInteger.valueOf(branchingFactor).pow(height);
        }
        height--;

        int missingLayers = maxDepth - depth;
        return BigInteger.valueOf(branchingFactor).pow(missingLayers - height);
    }

    public BigInteger getNumberOfMaxSubtreesOfMaxLengthUnderNonTerminalNodeInDepth(final int depth, final BigInteger maxNumberOfNodes) {
        return getNumberOfMaxSubtreesOfMaxLengthUnderNonTerminalNodeInDepth(depth, maxNumberOfNodes, this.branchingFactor, this.maxDepth);
    }

    public class BalancedTreeNode implements ITransparentTreeNode {

        protected final int depth;
        protected final BigInteger idOfNodeOnLayer;

        public BalancedTreeNode(final int depth, final BigInteger idOfNodeOnLayer) {
            super();
            this.depth = depth;
            this.idOfNodeOnLayer = idOfNodeOnLayer;
        }

        @Override
        public int hashCode() {
            final int prime = 31;
            int result = 1;
            result = prime * result + this.getEnclosingInstance().hashCode();
            result = prime * result + this.depth;
            result = prime * result + ((this.idOfNodeOnLayer == null) ? 0 : this.idOfNodeOnLayer.hashCode());
            return result;
        }

        @Override
        public boolean equals(final Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj == null) {
                return false;
            }
            if (this.getClass() != obj.getClass()) {
                return false;
            }
            BalancedTreeNode other = (BalancedTreeNode) obj;
            if (!this.getEnclosingInstance().equals(other.getEnclosingInstance())) {
                return false;
            }
            if (this.depth != other.depth) {
                return false;
            }
            if (this.idOfNodeOnLayer == null) {
                if (other.idOfNodeOnLayer != null) {
                    return false;
                }
            } else if (!this.idOfNodeOnLayer.equals(other.idOfNodeOnLayer)) {
                return false;
            }
            return true;
        }

        @Override
        public String toString() {
            return "N [depth=" + this.depth + ", idOfNodeOnLayer=" + this.idOfNodeOnLayer + "]";
        }

        @Override
        public int getDepth() {
            return this.depth;
        }

        @Override
        public BigInteger getNumberOfLeftRelativesInSameGeneration() {
            return this.idOfNodeOnLayer;
        }

        @Override
        public BigInteger getNumberOfRightRelativesInSameGeneration() {
            return BigInteger.valueOf(BalancedGraphGeneratorGenerator.this.branchingFactor).pow(this.depth).subtract(this.idOfNodeOnLayer).subtract(BigInteger.ONE);
        }

        @Override
        public BigInteger getNumberOfLeafsStemmingFromLeftRelativesInSameGeneration() {
            return this.getNumberOfLeafsUnderNode().multiply(this.getNumberOfLeftRelativesInSameGeneration());
        }

        @Override
        public BigInteger getNumberOfLeafsUnderNode() {
            return BigInteger.valueOf(BalancedGraphGeneratorGenerator.this.branchingFactor).pow(BalancedGraphGeneratorGenerator.this.maxDepth - this.depth);
        }

        @Override
        public BigInteger getNumberOfLeafsStemmingFromRightRelativesInSameGeneration() {
            return this.getNumberOfLeafsUnderNode().multiply(this.getNumberOfRightRelativesInSameGeneration());
        }

        @Override
        public int getDistanceToShallowestLeafUnderNode() {
            return BalancedGraphGeneratorGenerator.this.maxDepth - this.depth;
        }

        @Override
        public int getDistanceToDeepestLeafUnderNode() {
            return this.getDistanceToShallowestLeafUnderNode();
        }

        @Override
        public BigInteger getNumberOfSubtreesWithMaxNumberOfNodesPriorToThisNode(final BigInteger maxNumberOfNodes) {
            return this.getNumberOfLeafsPriorToNodeViaDFS().divideAndRemainder(maxNumberOfNodes)[0]; // here we can exploit the special structure of the balanced tree
        }

        @Override
        public BigInteger getNumberOfSubtreesWithMaxNumberOfNodes(final BigInteger maxNumberOfNodes) {
            return BalancedGraphGeneratorGenerator.this.getNumberOfMaxSubtreesOfMaxLengthUnderNonTerminalNodeInDepth(this.depth, maxNumberOfNodes);
        }

        @Override
        public BigInteger getNumberOfLeafsPriorToNodeViaDFS() {
            if (this.depth == BalancedGraphGeneratorGenerator.this.maxDepth) {
                return this.getNumberOfLeftRelativesInSameGeneration();
            } else {
                return this.getNumberOfLeftRelativesInSameGeneration()
                        .multiply(BigInteger.valueOf(getNumberOfLeafsUnderANonTerminalNodeInDepth(this.depth, BalancedGraphGeneratorGenerator.this.branchingFactor, BalancedGraphGeneratorGenerator.this.maxDepth)));
            }
        }

        @Override
        public BigInteger getNumberOfLeafsInSubtreesWithMaxNumberOfNodesPriorToThisNode(final BigInteger maxNumberOfNodes) {
            throw new UnsupportedOperationException();
        }

        private BalancedGraphGeneratorGenerator getEnclosingInstance() {
            return BalancedGraphGeneratorGenerator.this;
        }

        @Override
        public boolean hasChildren() {
            throw new UnsupportedOperationException();
        }
    }

    public BalancedGraphGeneratorGenerator(final int branchingFactor, final int depth) {
        super();
        this.branchingFactor = branchingFactor;
        this.maxDepth = depth;
    }

    public IGraphGenerator<ITransparentTreeNode, Integer> create() {
        return new IGraphGenerator<ITransparentTreeNode, Integer>() {

            @Override
            public ISingleRootGenerator<ITransparentTreeNode> getRootGenerator() {
                return () -> new BalancedTreeNode(0, BigInteger.ZERO);
            }

            @Override
            public ILazySuccessorGenerator<ITransparentTreeNode, Integer> getSuccessorGenerator() {
                return new ILazySuccessorGenerator<ITransparentTreeNode, Integer>() {

                    private Map<ITransparentTreeNode, Set<Integer>> successors = new HashMap<>();

                    @Override
                    public List<INewNodeDescription<ITransparentTreeNode, Integer>> generateSuccessors(final ITransparentTreeNode node) {
                        List<INewNodeDescription<ITransparentTreeNode, Integer>> successorsOfThisNode = new ArrayList<>();
                        int d = node.getDepth() + 1;
                        if (d > BalancedGraphGeneratorGenerator.this.maxDepth) {
                            return successorsOfThisNode;
                        }
                        for (int i = 0; i < BalancedGraphGeneratorGenerator.this.branchingFactor; i++) {
                            successorsOfThisNode.add(this.generateSuccessor(node, i));
                        }
                        return successorsOfThisNode;
                    }

                    public NodeExpansionDescription<ITransparentTreeNode, Integer> generateSuccessor(final ITransparentTreeNode node, final int i) {
                        int j = i % BalancedGraphGeneratorGenerator.this.branchingFactor;
                        int d = node.getDepth() + 1;
                        BigInteger leftRelativesInGenerationOfNode = node.getNumberOfLeftRelativesInSameGeneration();
                        Objects.requireNonNull(leftRelativesInGenerationOfNode);
                        BigInteger offsetForIdOnLayer = BigInteger.valueOf(BalancedGraphGeneratorGenerator.this.branchingFactor).multiply(leftRelativesInGenerationOfNode);
                        BalancedTreeNode successor = new BalancedTreeNode(d, offsetForIdOnLayer.add(BigInteger.valueOf(j)));
                        this.successors.computeIfAbsent(node, n -> new HashSet<>()).add(j);
                        return new NodeExpansionDescription<>(successor, j);
                    }

                    @Override
                    public Iterator<INewNodeDescription<ITransparentTreeNode, Integer>> getIterativeGenerator(final ITransparentTreeNode node) {
                        return new MappingIterator<>(IntStream.range(0, BalancedGraphGeneratorGenerator.this.branchingFactor).iterator(), i -> this.generateSuccessor(node, i));
                    }
                };
            }
        };
    }
}
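A small expansion sketch: building a balanced tree with branching factor 2 and depth 3 and expanding its root. The accessor getRoot() on ISingleRootGenerator is an assumption (the lambda above shows it has exactly one no-arg abstract method returning the root), and generateSuccessors may declare InterruptedException, hence the throws clause.

// Hypothetical usage sketch; getRoot() is assumed to be the single abstract method of ISingleRootGenerator.
public static void expandRootOfSmallTree() throws InterruptedException {
    IGraphGenerator<ITransparentTreeNode, Integer> generator = new BalancedGraphGeneratorGenerator(2, 3).create(); // branching factor 2, depth 3
    ITransparentTreeNode root = ((ISingleRootGenerator<ITransparentTreeNode>) generator.getRootGenerator()).getRoot();
    for (INewNodeDescription<ITransparentTreeNode, Integer> successor : generator.getSuccessorGenerator().generateSuccessors(root)) {
        System.out.println(successor); // two successors on layer 1
    }
}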
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/graphmodels/balanced/BalancedGraphSearchWithPathEvaluationsProblem.java
package ai.libs.jaicore.search.syntheticgraphs.graphmodels.balanced;

import java.math.BigInteger;

import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.PathEvaluationException;
import org.api4.java.datastructure.graph.ILabeledPath;

import ai.libs.jaicore.search.probleminputs.GraphSearchWithPathEvaluationsInput;
import ai.libs.jaicore.search.syntheticgraphs.ISyntheticTreasureIslandProblem;
import ai.libs.jaicore.search.syntheticgraphs.graphmodels.ITransparentTreeNode;
import ai.libs.jaicore.search.syntheticgraphs.islandmodels.IIslandModel;
import ai.libs.jaicore.search.syntheticgraphs.islandmodels.equalsized.EqualSizedIslandsModel;
import ai.libs.jaicore.search.syntheticgraphs.treasuremodels.islands.noisymean.ChaoticMeansTreasureModel;

public class BalancedGraphSearchWithPathEvaluationsProblem extends GraphSearchWithPathEvaluationsInput<ITransparentTreeNode, Integer, Double> implements ISyntheticTreasureIslandProblem {

    private final int expectedNumberOfIslands;
    private final int numTreasures;
    private final IIslandModel islandModel;
    private final int exactIslandSize;
    private final ChaoticMeansTreasureModel treasureModel;

    public static IIslandModel getIslandModel(final int branchingFactor, final int depth, final int distanceToIslands) {
        return new EqualSizedIslandsModel(BigInteger.valueOf((long) Math.pow(branchingFactor, depth - (double) distanceToIslands)));
    }

    public static ChaoticMeansTreasureModel getTreasureModel(final int branchingFactor, final int depth, final int distanceToIslands, final int numberOfIslandsWithTreasure) {
        return new ChaoticMeansTreasureModel(numberOfIslandsWithTreasure, getIslandModel(branchingFactor, depth, distanceToIslands), 0);
    }

    public BalancedGraphSearchWithPathEvaluationsProblem(final int branchingFactor, final int depth, final int distanceToIslands, final int numberOfIslandsWithTreasure) {
        super(new BalanceGraphSearchProblem(branchingFactor, depth), getTreasureModel(branchingFactor, depth, distanceToIslands, numberOfIslandsWithTreasure));
        this.numTreasures = numberOfIslandsWithTreasure;
        this.treasureModel = (ChaoticMeansTreasureModel) this.getPathEvaluator();
        this.treasureModel.setGraphSearchInput(this);
        this.islandModel = this.treasureModel.getIslandModel();
        this.expectedNumberOfIslands = (int) Math.pow(branchingFactor, distanceToIslands);
        this.exactIslandSize = BalancedGraphGeneratorGenerator.getNumberOfLeafsUnderANonTerminalNodeInDepth(distanceToIslands, branchingFactor, depth);
        if ((int) Math.pow(branchingFactor, depth - (double) distanceToIslands) != this.exactIslandSize) {
            throw new IllegalStateException("Island size is not computed correctly.");
        }
    }

    @Override
    public IIslandModel getIslandModel() {
        return this.islandModel;
    }

    @Override
    public int getExpectedNumberOfIslands() {
        return this.expectedNumberOfIslands;
    }

    @Override
    public int getMaximumIslandSizes() {
        return this.exactIslandSize;
    }

    @Override
    public int getMinimumIslandSizes() {
        return this.exactIslandSize;
    }

    @Override
    public int getNumberOfTreasureIslands() {
        return this.numTreasures;
    }

    @Override
    public boolean isPathATreasure(final ILabeledPath<ITransparentTreeNode, Integer> path) throws PathEvaluationException, InterruptedException {
        return this.treasureModel.isPathToTreasureIsland(path);
    }
}
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/graphmodels/degenerated/DegeneratedGraphGeneratorGenerator.java
package ai.libs.jaicore.search.syntheticgraphs.graphmodels.degenerated;

import java.math.BigInteger;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.stream.IntStream;

import org.api4.java.datastructure.graph.implicit.IGraphGenerator;
import org.api4.java.datastructure.graph.implicit.ILazySuccessorGenerator;
import org.api4.java.datastructure.graph.implicit.INewNodeDescription;
import org.api4.java.datastructure.graph.implicit.ISingleRootGenerator;

import ai.libs.jaicore.basic.MappingIterator;
import ai.libs.jaicore.search.model.NodeExpansionDescription;
import ai.libs.jaicore.search.syntheticgraphs.ISyntheticGraphGeneratorBuilder;
import ai.libs.jaicore.search.syntheticgraphs.graphmodels.ITransparentTreeNode;

public class DegeneratedGraphGeneratorGenerator implements ISyntheticGraphGeneratorBuilder {

    private final Random random;
    private final int deadEndsPerGeneration;
    private final int branchingFactor;
    private final int maxDepth;

    public BigInteger getNumberOfLeafsUnderANonTerminalNodeInDepth(final int depthOfRequestedNode, final int assumedDepthOfTree) {
        if (depthOfRequestedNode > assumedDepthOfTree) {
            throw new IllegalArgumentException("Requested node must not be deeper than the assumed depth of the tree!");
        }
        int remainingDepth = assumedDepthOfTree - depthOfRequestedNode;

        /* compute the number of inner nodes (including the given one) */
        BigInteger innerNodes = BigInteger.ZERO;
        for (int k = 0; k < remainingDepth; k++) {
            innerNodes = innerNodes.add(BigInteger.valueOf((long) this.branchingFactor - this.deadEndsPerGeneration).pow(k));
        }

        /* compute the leafs stemming directly from the inner nodes */
        BigInteger innerDeadEndSolutions = innerNodes.multiply(BigInteger.valueOf(this.deadEndsPerGeneration));
        BigInteger additionalLeafsOnLastLevel = BigInteger.valueOf((long) this.branchingFactor - this.deadEndsPerGeneration).pow(remainingDepth);
        return innerDeadEndSolutions.add(additionalLeafsOnLastLevel);
    }

    public BigInteger getNumberOfMaxSubtreesOfMaxLengthUnderNonTerminalNodeInDepth(final int depth, final BigInteger maxNumberOfNodes) {

        /* check from bottom to top how many nodes can be contained in a single limited sub-tree */
        int height = 1;
        while (this.getNumberOfLeafsUnderANonTerminalNodeInDepth(this.maxDepth - height, this.maxDepth).compareTo(maxNumberOfNodes) <= 0) {
            height++;
        }
        height--;
        if (height > this.maxDepth) {
            throw new IllegalStateException("The height of the subtree cannot be higher than the max depth of the tree.");
        }

        /* at this point, the height variable contains the height of a sub-tree that can serve as an island.
         * Every node up to a level of maxDepth - height then serves as the root of a sub-graph */
        int depthOfLayer = this.maxDepth - height;
        if (depthOfLayer < depth) {
            return BigInteger.ZERO;
        }
        return this.getNumberOfLeafsUnderANonTerminalNodeInDepth(depth, depthOfLayer);
    }

    public BigInteger getMaxNumberOfLeafsInEverySubtreeWithLimitedNumberOfLeafs(final BigInteger maxNumberOfNodes) {

        /* check from bottom to top how many leafs can be contained in a single limited sub-tree */
        int heightFromBottom = 1;
        while (this.getNumberOfLeafsUnderANonTerminalNodeInDepth(this.maxDepth - heightFromBottom, this.maxDepth).compareTo(maxNumberOfNodes) <= 0) {
            heightFromBottom++;
        }
        heightFromBottom--;
        if (heightFromBottom == 1 && this.getNumberOfLeafsUnderANonTerminalNodeInDepth(this.maxDepth - 1, this.maxDepth).compareTo(maxNumberOfNodes) > 0) {
            return BigInteger.ONE;
        }
        BigInteger maxLeafs = this.getNumberOfLeafsUnderANonTerminalNodeInDepth(this.maxDepth - heightFromBottom, this.maxDepth);
        if (maxLeafs.compareTo(maxNumberOfNodes) > 0) {
            throw new IllegalStateException("Cannot return a number that is bigger than the initially given limit.\nTo return: " + maxLeafs + "\nLimit: " + maxNumberOfNodes);
        }
        return maxLeafs;
    }

    public class TreeNode implements ITransparentTreeNode {

        protected TreeNode parent;
        protected int depth;
        protected Set<Integer> indicesOfChildrenWithoutChildren;
        protected int idOfNodeAmongChildren;
        protected BigInteger idOfNodeOnLayer;
        protected int numOfLeftSiblingsThatHaveChildren;
        protected BigInteger numOfLeftRelativesThatHaveChildren;
        protected BigInteger numberOfLeafsFoundByDFSWhenReachingThisNode;
        protected boolean hasChildren;

        public TreeNode(final TreeNode parent, final int depth, final BigInteger idOfNodeOnLayer, final int idOfNodeAmongChildren, final BigInteger numOfLeftRelativesThatHaveChildren,
                final boolean hasChildren, final int numOfLeftSiblingsThatHaveChildren, final BigInteger solutionsPriorToThisNodeViaDFS) {
            super();
            this.parent = parent;
            this.depth = depth;
            this.idOfNodeAmongChildren = idOfNodeAmongChildren;
            this.idOfNodeOnLayer = idOfNodeOnLayer;
            this.numOfLeftRelativesThatHaveChildren = numOfLeftRelativesThatHaveChildren;
            this.hasChildren = hasChildren;
            this.numOfLeftSiblingsThatHaveChildren = numOfLeftSiblingsThatHaveChildren;
            this.numberOfLeafsFoundByDFSWhenReachingThisNode = solutionsPriorToThisNodeViaDFS;
            this.indicesOfChildrenWithoutChildren = new HashSet<>();
            if (hasChildren) {
                while (this.indicesOfChildrenWithoutChildren.size() < DegeneratedGraphGeneratorGenerator.this.deadEndsPerGeneration) {
                    this.indicesOfChildrenWithoutChildren.add(DegeneratedGraphGeneratorGenerator.this.random.nextInt(DegeneratedGraphGeneratorGenerator.this.branchingFactor));
                }
            }
        }

        @Override
        public int hashCode() {
            final int prime = 31;
            int result = 1;
            result = prime * result + this.depth;
            result = prime * result + this.idOfNodeOnLayer.hashCode();
            return result;
        }

        @Override
        public boolean equals(final Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj == null) {
                return false;
            }
            if (this.getClass() != obj.getClass()) {
                return false;
            }
            TreeNode other = (TreeNode) obj;
            if (this.depth != other.depth) {
                return false;
            }
            return this.idOfNodeOnLayer.equals(other.idOfNodeOnLayer);
        }

        @Override
        public String toString() {
            return "N [depth=" + this.depth + ", idOfNodeOnLayer=" + this.idOfNodeOnLayer + "]";
        }

        @Override
        public int getDepth() {
            return this.depth;
        }

        @Override
        public BigInteger getNumberOfLeftRelativesInSameGeneration() {
            return this.idOfNodeOnLayer;
        }

        @Override
        public BigInteger getNumberOfLeafsPriorToNodeViaDFS() {
            return this.numberOfLeafsFoundByDFSWhenReachingThisNode;
        }

        @Override
        public BigInteger getNumberOfRightRelativesInSameGeneration() {
            return BigInteger.valueOf(DegeneratedGraphGeneratorGenerator.this.branchingFactor).pow(this.depth).subtract(this.idOfNodeOnLayer).subtract(BigInteger.ONE);
        }

        @Override
        public BigInteger getNumberOfLeafsStemmingFromLeftRelativesInSameGeneration() {
            throw new UnsupportedOperationException();
        }

        @Override
        public BigInteger getNumberOfLeafsStemmingFromRightRelativesInSameGeneration() {
            throw new UnsupportedOperationException();
        }

        @Override
        public BigInteger getNumberOfLeafsUnderNode() {
            if (!this.hasChildren) {
                return BigInteger.ONE;
            }
            return DegeneratedGraphGeneratorGenerator.this.getNumberOfLeafsUnderANonTerminalNodeInDepth(this.depth, DegeneratedGraphGeneratorGenerator.this.maxDepth);
        }

        @Override
        public int getDistanceToShallowestLeafUnderNode() {
            return 0;
        }

        @Override
        public int getDistanceToDeepestLeafUnderNode() {
            return DegeneratedGraphGeneratorGenerator.this.maxDepth - this.depth;
        }

        @Override
        public BigInteger getNumberOfSubtreesWithMaxNumberOfNodesPriorToThisNode(final BigInteger maxNumberOfNodes) {
            if (this.parent == null) {
                return BigInteger.ZERO;
            }

            /* get number of complete subtrees when arriving at the parent */
            BigInteger numSubtreesInducedByParentLevels = this.parent.getNumberOfSubtreesWithMaxNumberOfNodesPriorToThisNode(maxNumberOfNodes);

            /* if the parent consists of only one such sub-tree itself, return the just computed value */
            if (this.parent.getNumberOfLeafsUnderNode().compareTo(maxNumberOfNodes) <= 0) {
                return numSubtreesInducedByParentLevels;
            }

            /* otherwise, sum over the sub-trees of left siblings */
            BigInteger maxNumberOfSubTreesForNonTerminalsOfThisDepth = DegeneratedGraphGeneratorGenerator.this.getNumberOfMaxSubtreesOfMaxLengthUnderNonTerminalNodeInDepth(this.depth, maxNumberOfNodes);
            BigInteger subTreesUnderLeftSiblings = maxNumberOfSubTreesForNonTerminalsOfThisDepth.multiply(BigInteger.valueOf(this.numOfLeftSiblingsThatHaveChildren));
            subTreesUnderLeftSiblings = subTreesUnderLeftSiblings.add(BigInteger.valueOf((long) this.idOfNodeAmongChildren - this.numOfLeftSiblingsThatHaveChildren));
            return numSubtreesInducedByParentLevels.add(subTreesUnderLeftSiblings);
        }

        @Override
        public BigInteger getNumberOfLeafsInSubtreesWithMaxNumberOfNodesPriorToThisNode(final BigInteger maxNumberOfNodes) {
            return DegeneratedGraphGeneratorGenerator.this.getMaxNumberOfLeafsInEverySubtreeWithLimitedNumberOfLeafs(maxNumberOfNodes).multiply(this.getNumberOfSubtreesWithMaxNumberOfNodesPriorToThisNode(maxNumberOfNodes));
        }

        @Override
        public BigInteger getNumberOfSubtreesWithMaxNumberOfNodes(final BigInteger maxNumberOfNodes) {
            return DegeneratedGraphGeneratorGenerator.this.getNumberOfMaxSubtreesOfMaxLengthUnderNonTerminalNodeInDepth(this.depth, maxNumberOfNodes);
        }

        @Override
        public boolean hasChildren() {
            return this.hasChildren;
        }
    }

    public DegeneratedGraphGeneratorGenerator(final Random random, final int deadEndsPerGeneration, final int branchingFactor, final int maxDepth) {
        super();
        this.random = random;
        this.deadEndsPerGeneration = deadEndsPerGeneration;
        this.branchingFactor = branchingFactor;
        this.maxDepth = maxDepth;
    }

    public class DegeneratedGraphGenerator implements IGraphGenerator<ITransparentTreeNode, Integer> {

        @Override
        public ISingleRootGenerator<ITransparentTreeNode> getRootGenerator() {
            return () -> new TreeNode(null, 0, BigInteger.ZERO, 0, BigInteger.ZERO, true, 0, BigInteger.ZERO);
        }

        @Override
        public ILazySuccessorGenerator<ITransparentTreeNode, Integer> getSuccessorGenerator() {
            return new ILazySuccessorGenerator<ITransparentTreeNode, Integer>() {

                private Map<ITransparentTreeNode, Set<Integer>> successors = new HashMap<>();

                @Override
                public List<INewNodeDescription<ITransparentTreeNode, Integer>> generateSuccessors(final ITransparentTreeNode node) throws InterruptedException {
                    TreeNode tNode = (TreeNode) node;
                    List<INewNodeDescription<ITransparentTreeNode, Integer>> successorsOfThisNode = new ArrayList<>();
                    if (!tNode.hasChildren) {
                        return successorsOfThisNode;
                    }
                    int d = node.getDepth() + 1;
                    if (d > DegeneratedGraphGeneratorGenerator.this.maxDepth) {
                        return successorsOfThisNode;
                    }
                    Iterator<INewNodeDescription<ITransparentTreeNode, Integer>> it = this.getIterativeGenerator(node);
                    while (it.hasNext()) {
                        successorsOfThisNode.add(it.next());
                    }
                    return successorsOfThisNode;
                }

                private INewNodeDescription<ITransparentTreeNode, Integer> getSuccessor(final ITransparentTreeNode node, final int indexOfChild) {
                    TreeNode tNode = (TreeNode) node;
                    if (!tNode.hasChildren) {
                        throw new IllegalArgumentException("Node " + node + " has no children and, hence, cannot have any successor being generated.");
                    }
                    int j = indexOfChild % DegeneratedGraphGeneratorGenerator.this.branchingFactor; // note that j is also the number of left siblings
                    int d = node.getDepth() + 1;

                    /* compute offset of ids for successors under this node, and also the number of nodes left of the successor that have children */
                    BigInteger offsetForIdOnLayer = tNode.numOfLeftRelativesThatHaveChildren.multiply(BigInteger.valueOf(DegeneratedGraphGeneratorGenerator.this.branchingFactor));
                    BigInteger numOfLeftRelativesThatHaveChildren = tNode.numOfLeftRelativesThatHaveChildren.multiply(BigInteger.valueOf((long) DegeneratedGraphGeneratorGenerator.this.branchingFactor - DegeneratedGraphGeneratorGenerator.this.deadEndsPerGeneration));
                    int numOfLeftSiblingsThatHaveChildren = 0;
                    long numOfLeftSiblingsWithoutChildren = 0;
                    for (int k = 0; k < j; k++) {
                        if (!tNode.indicesOfChildrenWithoutChildren.contains(k)) {
                            numOfLeftSiblingsThatHaveChildren++;
                        } else {
                            numOfLeftSiblingsWithoutChildren++; // these are leafs themselves
                        }
                    }
                    BigInteger numOfLeftSiblingsWithChildrenAsBigInt = BigInteger.valueOf(numOfLeftSiblingsThatHaveChildren);
                    numOfLeftRelativesThatHaveChildren = numOfLeftRelativesThatHaveChildren.add(numOfLeftSiblingsWithChildrenAsBigInt);

                    /* compute number of solutions found by DFS when reaching this node */
                    BigInteger numOfSolutionsOfEveryLeftSiblingWithChildren = DegeneratedGraphGeneratorGenerator.this.getNumberOfLeafsUnderANonTerminalNodeInDepth(d, DegeneratedGraphGeneratorGenerator.this.maxDepth);
                    BigInteger numOfSolutionsUnderLeftSiblings = numOfSolutionsOfEveryLeftSiblingWithChildren.multiply(numOfLeftSiblingsWithChildrenAsBigInt).add(BigInteger.valueOf(numOfLeftSiblingsWithoutChildren));
                    BigInteger numberOfSolutionsFoundByDFS = tNode.numberOfLeafsFoundByDFSWhenReachingThisNode.add(numOfSolutionsUnderLeftSiblings);

                    /* check whether the node has children itself */
                    boolean hasChildren = !tNode.indicesOfChildrenWithoutChildren.contains(indexOfChild) && d < DegeneratedGraphGeneratorGenerator.this.maxDepth;

                    /* create node */
                    TreeNode successor = new TreeNode(tNode, d, offsetForIdOnLayer.add(BigInteger.valueOf(j)), j, numOfLeftRelativesThatHaveChildren, hasChildren, numOfLeftSiblingsThatHaveChildren, numberOfSolutionsFoundByDFS);
                    this.successors.computeIfAbsent(node, n -> new HashSet<>()).add(j);
                    return new NodeExpansionDescription<>(successor, j);
                }

                @Override
                public Iterator<INewNodeDescription<ITransparentTreeNode, Integer>> getIterativeGenerator(final ITransparentTreeNode node) {
                    return new MappingIterator<>(IntStream.range(0, DegeneratedGraphGeneratorGenerator.this.branchingFactor).iterator(), i -> this.getSuccessor(node, i));
                }
            };
        }

        public BigInteger getMaxNumberOfLeafsInEverySubtreeOfMaxLength(final BigInteger maxNumberOfNodes) {
            return DegeneratedGraphGeneratorGenerator.this.getMaxNumberOfLeafsInEverySubtreeWithLimitedNumberOfLeafs(maxNumberOfNodes);
        }
    }

    @Override
    public IGraphGenerator<ITransparentTreeNode, Integer> build() {
        return new DegeneratedGraphGenerator();
    }
}
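A counting sketch for the degenerated model; the parameters (seed 0, one dead end per generation, branching factor 4, depth 10) are illustrative assumptions.

// Hypothetical usage sketch; parameters are illustrative only.
DegeneratedGraphGeneratorGenerator builder = new DegeneratedGraphGeneratorGenerator(new Random(0), 1, 4, 10);
BigInteger leafsUnderRoot = builder.getNumberOfLeafsUnderANonTerminalNodeInDepth(0, 10);
BigInteger islandsOfAtMost100Leafs = builder.getNumberOfMaxSubtreesOfMaxLengthUnderNonTerminalNodeInDepth(0, BigInteger.valueOf(100));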
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/graphmodels/degenerated/DegeneratedGraphSearchProblem.java
package ai.libs.jaicore.search.syntheticgraphs.graphmodels.degenerated;

import java.util.Random;

import org.api4.java.ai.graphsearch.problem.implicit.graphgenerator.INodeGoalTester;

import ai.libs.jaicore.search.probleminputs.GraphSearchInput;
import ai.libs.jaicore.search.syntheticgraphs.graphmodels.ITransparentTreeNode;
import ai.libs.jaicore.search.syntheticgraphs.graphmodels.degenerated.DegeneratedGraphGeneratorGenerator.DegeneratedGraphGenerator;
import ai.libs.jaicore.search.syntheticgraphs.graphmodels.degenerated.DegeneratedGraphGeneratorGenerator.TreeNode;

public class DegeneratedGraphSearchProblem extends GraphSearchInput<ITransparentTreeNode, Integer> {

	public DegeneratedGraphSearchProblem(final Random r, final int deadEndsPerGeneration, final int branchingFactor, final int depth) {
		super(new DegeneratedGraphGeneratorGenerator(r, deadEndsPerGeneration, branchingFactor, depth).build(), new INodeGoalTester<ITransparentTreeNode, Integer>() {

			@Override
			public boolean isGoal(final ITransparentTreeNode node) {
				return !((TreeNode) node).hasChildren;
			}
		});
	}

	@Override
	public DegeneratedGraphGenerator getGraphGenerator() {
		return (DegeneratedGraphGenerator) super.getGraphGenerator();
	}
}
0
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/graphmodels
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/graphmodels/degenerated/DegeneratedGraphSearchWithPathEvaluationsProblem.java
package ai.libs.jaicore.search.syntheticgraphs.graphmodels.degenerated;

import java.math.BigInteger;
import java.util.Random;

import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.PathEvaluationException;
import org.api4.java.datastructure.graph.ILabeledPath;

import ai.libs.jaicore.search.probleminputs.GraphSearchWithPathEvaluationsInput;
import ai.libs.jaicore.search.syntheticgraphs.ISyntheticTreasureIslandProblem;
import ai.libs.jaicore.search.syntheticgraphs.graphmodels.ITransparentTreeNode;
import ai.libs.jaicore.search.syntheticgraphs.graphmodels.degenerated.DegeneratedGraphGeneratorGenerator.DegeneratedGraphGenerator;
import ai.libs.jaicore.search.syntheticgraphs.islandmodels.IIslandModel;
import ai.libs.jaicore.search.syntheticgraphs.treasuremodels.ITreasureModel;
import ai.libs.jaicore.search.syntheticgraphs.treasuremodels.islands.noisymean.ChaoticMeansTreasureModel;

public class DegeneratedGraphSearchWithPathEvaluationsProblem extends GraphSearchWithPathEvaluationsInput<ITransparentTreeNode, Integer, Double> implements ISyntheticTreasureIslandProblem {

	private final int expectedNumberOfIslands;
	private final int numTreasures;
	private final IIslandModel islandModel;
	private final int exactIslandSize;
	private final ITreasureModel treasureModel;

	public DegeneratedGraphSearchWithPathEvaluationsProblem(final Random random, final int deadEndsPerGeneration, final int branchingFactor, final int depth, final int maxIslandSize,
			final int numberOfIslandsWithTreasure, final IIslandModel islandModel, final ITreasureModel treasureModel) {
		super(new DegeneratedGraphSearchProblem(random, deadEndsPerGeneration, branchingFactor, depth), treasureModel);
		this.treasureModel = treasureModel;
		if (treasureModel instanceof ChaoticMeansTreasureModel) {
			((ChaoticMeansTreasureModel) treasureModel).setGraphSearchInput(this);
		}
		this.exactIslandSize = this.getGraphGenerator().getMaxNumberOfLeafsInEverySubtreeOfMaxLength(BigInteger.valueOf(maxIslandSize)).intValueExact();
		this.islandModel = islandModel;
		this.numTreasures = numberOfIslandsWithTreasure;
		this.expectedNumberOfIslands = this.getGraphGenerator().getRootGenerator().getRoots().iterator().next().getNumberOfSubtreesWithMaxNumberOfNodes(BigInteger.valueOf(maxIslandSize)).intValueExact();
		// this.logger.info("Now considering graph with bf {}, depth {}, (exact) island size {} (max configured size was {}), and {} islands with treasure.", this.branchingFactor, this.depth, this.exactIslandSize,
		// this.maxIslandSize.intValueExact(), this.numberOfIslandsWithTreasure.intValue());
	}

	@Override
	public DegeneratedGraphGenerator getGraphGenerator() {
		return (DegeneratedGraphGenerator) super.getGraphGenerator();
	}

	@Override
	public IIslandModel getIslandModel() {
		return this.islandModel;
	}

	@Override
	public int getExpectedNumberOfIslands() {
		return this.expectedNumberOfIslands;
	}

	@Override
	public int getMaximumIslandSizes() {
		return this.exactIslandSize;
	}

	@Override
	public int getMinimumIslandSizes() {
		return 1;
	}

	@Override
	public int getNumberOfTreasureIslands() {
		return this.numTreasures;
	}

	@Override
	public boolean isPathATreasure(final ILabeledPath<ITransparentTreeNode, Integer> path) throws PathEvaluationException, InterruptedException {
		return this.treasureModel.isPathToTreasureIsland(path);
	}
}
0
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/islandmodels/IIslandModel.java
package ai.libs.jaicore.search.syntheticgraphs.islandmodels;

import java.math.BigInteger;

import org.api4.java.datastructure.graph.ILabeledPath;

import ai.libs.jaicore.search.syntheticgraphs.graphmodels.ITransparentTreeNode;

/**
 * Gives information about the number of islands and the id of the island to which a particular path leads.
 *
 * setRootNode or getIsland must be invoked before getNumberOfIslands is called.
 *
 * @author fmohr
 */
public interface IIslandModel {

	public void setRootNode(ITransparentTreeNode root);

	public BigInteger getIsland(ILabeledPath<ITransparentTreeNode, Integer> path);

	public BigInteger getSizeOfIsland(ILabeledPath<ITransparentTreeNode, Integer> path);

	public BigInteger getPositionOnIsland(ILabeledPath<ITransparentTreeNode, Integer> path);

	public BigInteger getNumberOfIslands();
}
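/*
 * Minimal usage sketch of the call-order contract documented above, assuming the
 * EqualSizedIslandsModel implementation from this package; the variables root and
 * pathToLeaf are hypothetical placeholders for illustration:
 *
 *   IIslandModel model = new EqualSizedIslandsModel(BigInteger.valueOf(8));
 *   model.setRootNode(root);                              // must happen before getNumberOfIslands()
 *   BigInteger numIslands = model.getNumberOfIslands();   // now safe to call
 *   BigInteger islandOfPath = model.getIsland(pathToLeaf);
 */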
0
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/islandmodels
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/islandmodels/equalsized/EqualSizedIslandsModel.java
package ai.libs.jaicore.search.syntheticgraphs.islandmodels.equalsized;

import java.math.BigInteger;

import org.api4.java.datastructure.graph.ILabeledPath;

import ai.libs.jaicore.search.syntheticgraphs.graphmodels.ITransparentTreeNode;
import ai.libs.jaicore.search.syntheticgraphs.islandmodels.IIslandModel;

public class EqualSizedIslandsModel implements IIslandModel {

	private final BigInteger size;
	private long numberOfIslands = -1;
	private ITransparentTreeNode rootNode;

	public EqualSizedIslandsModel(final int size) {
		this(BigInteger.valueOf(size));
	}

	public EqualSizedIslandsModel(final BigInteger size) {
		super();
		this.size = size;
	}

	@Override
	public BigInteger getIsland(final ILabeledPath<ITransparentTreeNode, Integer> path) {
		return path.getHead().getNumberOfSubtreesWithMaxNumberOfNodesPriorToThisNode(this.size);
	}

	@Override
	public BigInteger getNumberOfIslands() {
		if (this.rootNode == null) {
			throw new IllegalStateException("Root has not been initialized yet!");
		}
		return this.rootNode.getNumberOfSubtreesWithMaxNumberOfNodes(this.size);
	}

	@Override
	public void setRootNode(final ITransparentTreeNode root) {
		this.rootNode = root;
	}

	@Override
	public BigInteger getSizeOfIsland(final ILabeledPath<ITransparentTreeNode, Integer> path) {
		ILabeledPath<ITransparentTreeNode, Integer> currentPath = path;
		while (!currentPath.getArcs().isEmpty() && currentPath.getPathToParentOfHead().getHead().getNumberOfLeafsUnderNode().compareTo(this.size) <= 0) {
			currentPath = currentPath.getPathToParentOfHead();
		}
		return currentPath.getHead().getNumberOfLeafsUnderNode();
	}

	@Override
	public BigInteger getPositionOnIsland(final ILabeledPath<ITransparentTreeNode, Integer> path) {
		return path.getHead().getNumberOfLeafsPriorToNodeViaDFS().subtract(path.getHead().getNumberOfLeafsInSubtreesWithMaxNumberOfNodesPriorToThisNode(this.size));
	}
}
0
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/treasuremodels/ITreasureModel.java
package ai.libs.jaicore.search.syntheticgraphs.treasuremodels;

import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.IPathEvaluator;
import org.api4.java.common.control.ILoggingCustomizable;
import org.api4.java.datastructure.graph.ILabeledPath;

import ai.libs.jaicore.search.syntheticgraphs.graphmodels.ITransparentTreeNode;

public interface ITreasureModel extends IPathEvaluator<ITransparentTreeNode, Integer, Double>, ILoggingCustomizable {

	public boolean isPathToTreasureIsland(ILabeledPath<ITransparentTreeNode, Integer> path);

	public double getMinimumAchievable();
}
0
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/treasuremodels/LinearTreasureModel.java
package ai.libs.jaicore.search.syntheticgraphs.treasuremodels;

import java.math.BigInteger;

import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.PathEvaluationException;
import org.api4.java.datastructure.graph.ILabeledPath;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ai.libs.jaicore.search.syntheticgraphs.graphmodels.ITransparentTreeNode;

/**
 * Distributes scores according to a simple linear function from the left to the right.
 *
 * @author felix
 */
public class LinearTreasureModel implements ITreasureModel {

	private Logger logger = LoggerFactory.getLogger(LinearTreasureModel.class);

	private final boolean asc;

	public LinearTreasureModel() {
		this(true);
	}

	public LinearTreasureModel(final boolean asc) {
		super();
		this.asc = asc;
	}

	@Override
	public Double evaluate(final ILabeledPath<ITransparentTreeNode, Integer> path) throws PathEvaluationException, InterruptedException {
		BigInteger numLeafsBefore = path.getHead().getNumberOfLeafsPriorToNodeViaDFS();
		return this.asc ? numLeafsBefore.doubleValue() : path.getRoot().getNumberOfLeafsUnderNode().subtract(numLeafsBefore).doubleValue();
	}

	@Override
	public double getMinimumAchievable() {
		return 0;
	}

	@Override
	public String getLoggerName() {
		return this.logger.getName();
	}

	@Override
	public void setLoggerName(final String name) {
		this.logger = LoggerFactory.getLogger(name);
	}

	@Override
	public boolean isPathToTreasureIsland(final ILabeledPath<ITransparentTreeNode, Integer> path) {
		throw new UnsupportedOperationException();
	}
}
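/*
 * Worked example of the linear score above: for a path whose head has k leaves left of it
 * in DFS order, evaluate returns k in the ascending case and (total number of leaves under
 * the root) - k in the descending case. So in a tree with 8 leaves, the leftmost leaf
 * scores 0.0 ascending and 8.0 descending.
 */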
0
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/treasuremodels
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/treasuremodels/islands/AIslandTreasureModel.java
package ai.libs.jaicore.search.syntheticgraphs.treasuremodels.islands;

import java.math.BigInteger;

import org.api4.java.datastructure.graph.ILabeledPath;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ai.libs.jaicore.search.syntheticgraphs.graphmodels.ITransparentTreeNode;
import ai.libs.jaicore.search.syntheticgraphs.islandmodels.IIslandModel;
import ai.libs.jaicore.search.syntheticgraphs.treasuremodels.ITreasureModel;

public abstract class AIslandTreasureModel implements ITreasureModel {

	private final IIslandModel islandModel;
	protected Logger logger = LoggerFactory.getLogger("treasuremodel." + this.getClass().getName());

	public AIslandTreasureModel(final IIslandModel islandModel) {
		super();
		this.islandModel = islandModel;
	}

	public IIslandModel getIslandModel() {
		return this.islandModel;
	}

	public BigInteger getTotalNumberOfIslands() {
		return this.islandModel.getNumberOfIslands();
	}

	public BigInteger getPositionOnIsland(final ILabeledPath<ITransparentTreeNode, Integer> path) {
		return this.getIslandModel().getPositionOnIsland(path).add(BigInteger.ONE);
	}

	public BigInteger getIsland(final ILabeledPath<ITransparentTreeNode, Integer> path) {
		return this.getIslandModel().getIsland(path);
	}

	public BigInteger getIslandSize(final ILabeledPath<ITransparentTreeNode, Integer> path) {
		return this.getIslandModel().getSizeOfIsland(path);
	}

	@Override
	public void setLoggerName(final String loggerName) {
		this.logger = LoggerFactory.getLogger(loggerName);
	}

	@Override
	public String getLoggerName() {
		return this.logger.getName();
	}
}
0
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/treasuremodels/islands
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/treasuremodels/islands/funnel/AbyssTreasureModel.java
package ai.libs.jaicore.search.syntheticgraphs.treasuremodels.islands.funnel;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.MathContext;
import java.math.RoundingMode;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Random;
import java.util.Set;

import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.PathEvaluationException;
import org.api4.java.datastructure.graph.ILabeledPath;

import ai.libs.jaicore.math.linearalgebra.AffineFunction;
import ai.libs.jaicore.search.syntheticgraphs.graphmodels.ITransparentTreeNode;
import ai.libs.jaicore.search.syntheticgraphs.islandmodels.IIslandModel;
import ai.libs.jaicore.search.syntheticgraphs.treasuremodels.islands.AIslandTreasureModel;

/**
 * The landscape is: plateau + mountain + abyss + mountain + plateau.
 *
 * @author felix
 */
public class AbyssTreasureModel extends AIslandTreasureModel {

	private final int numberOfTreasureIslands;
	private final Set<BigInteger> indicesOfIslands = new HashSet<>();
	private final int seed;
	private final double plateauMinForTreasures;
	private final double plateauMaxForTreasures;
	private final double plateauMinForNonTreasures;
	private final double plateauMaxForNonTreasures;
	private final double plateauWidths; // portion of the island that is plateau
	private final double moutainWidths;
	private final double mountainHeight;
	private final double abyssDepth;

	private double minimumAchievable = Double.MAX_VALUE;
	private final Map<BigInteger, Double> plateausOfIslands = new HashMap<>();

	public AbyssTreasureModel(final IIslandModel islandModel, final int numberOfTreasureIslands, final int seed, final double plateauMinForTreasures, final double plateauMaxForTreasures,
			final double plateauMinForNonTreasures, final double plateauMaxForNonTreasures, final double plateauWidths, final double moutainWidths, final double mountainHeight, final double abyssDepth) {
		super(islandModel);
		this.numberOfTreasureIslands = numberOfTreasureIslands;
		this.seed = seed;
		this.plateauMinForTreasures = plateauMinForTreasures;
		this.plateauMaxForTreasures = plateauMaxForTreasures;
		this.plateauMinForNonTreasures = plateauMinForNonTreasures;
		this.plateauMaxForNonTreasures = plateauMaxForNonTreasures;
		this.plateauWidths = plateauWidths;
		this.moutainWidths = moutainWidths;
		this.mountainHeight = mountainHeight;
		this.abyssDepth = abyssDepth;
		if (this.moutainWidths + this.plateauWidths >= 1) {
			throw new IllegalArgumentException("The widths of mountains and plateaus must sum up to less than 1.");
		}
	}

	public AbyssTreasureModel(final IIslandModel islandModel, final int numberOfTreasureIslands, final Random random) {
		this(islandModel, numberOfTreasureIslands, random.nextInt(), 0.1, 0.15, .2, .8, .2, .4, .2, .1);
	}

	private void distributeTreasures() {
		Random random = new Random(this.seed);
		while (this.indicesOfIslands.size() < this.numberOfTreasureIslands) {
			BigInteger newTreasureIsland;
			do {
				newTreasureIsland = new BigInteger(this.getIslandModel().getNumberOfIslands().bitLength(), random);
			} while (newTreasureIsland.compareTo(this.getIslandModel().getNumberOfIslands()) >= 0);
			this.indicesOfIslands.add(newTreasureIsland);
		}
		for (BigInteger island : this.indicesOfIslands) {
			double plateauOfThisIsland = this.plateauMinForTreasures + (this.plateauMaxForTreasures - this.plateauMinForTreasures) * random.nextDouble();
			this.plateausOfIslands.put(island, plateauOfThisIsland);
			this.minimumAchievable = Math.min(this.minimumAchievable, plateauOfThisIsland - this.abyssDepth);
		}
	}

	@Override
	public Double evaluate(final ILabeledPath<ITransparentTreeNode, Integer> path) throws PathEvaluationException, InterruptedException {
		if (this.indicesOfIslands.isEmpty()) {
			this.getIslandModel().setRootNode(path.getRoot());
			this.distributeTreasures();
		}
		BigInteger positionOnIsland = this.getPositionOnIsland(path);
		BigInteger island = this.getIsland(path);
		if (!this.plateausOfIslands.containsKey(island)) {
			this.plateausOfIslands.put(island, this.plateauMinForNonTreasures + (this.plateauMaxForNonTreasures - this.plateauMinForNonTreasures) * new Random(path.hashCode() + (long) this.seed).nextDouble());
		}
		double plateauOfIsland = this.plateausOfIslands.get(island);

		/* compute important island positions for distribution; the island *size* is needed here (not the island index) */
		BigInteger islandSize = this.getIslandSize(path);
		if (positionOnIsland.compareTo(islandSize) > 0) {
			throw new IllegalStateException("Position on island cannot be greater than the island itself.");
		}
		BigDecimal islandSizeAsDecimal = new BigDecimal(islandSize);
		BigDecimal k1; // first kink
		BigDecimal p1; // first peak
		BigDecimal k2; // second kink
		BigDecimal k3; // third kink
		BigDecimal p2; // second peak
		BigDecimal k4; // fourth kink
		BigDecimal median;
		BigDecimal mountainSegment = islandSizeAsDecimal.multiply(BigDecimal.valueOf(this.moutainWidths / 4.0));
		BigDecimal abyssSegment = islandSizeAsDecimal.multiply(BigDecimal.valueOf((1 - this.moutainWidths - this.plateauWidths) / 2));
		k1 = islandSizeAsDecimal.multiply(BigDecimal.valueOf(this.plateauWidths / 2.0)).round(new MathContext(1, RoundingMode.CEILING));
		p1 = k1.add(mountainSegment);
		k2 = p1.add(mountainSegment);
		median = k2.add(abyssSegment);
		k3 = median.add(abyssSegment);
		p2 = k3.add(mountainSegment);
		k4 = p2.add(mountainSegment);
		if (k4.compareTo(islandSizeAsDecimal) >= 0) {
			throw new IllegalStateException();
		}

		double val;
		BigDecimal absolutePlateauHeight = BigDecimal.valueOf(plateauOfIsland);
		BigDecimal absoluteMountainHeight = BigDecimal.valueOf(plateauOfIsland).add(BigDecimal.valueOf(this.mountainHeight));
		BigDecimal absoluteAbyssDepth = BigDecimal.valueOf(plateauOfIsland).subtract(BigDecimal.valueOf(this.abyssDepth));
		if (positionOnIsland.compareTo(k1.toBigInteger()) <= 0 || positionOnIsland.compareTo(k4.toBigInteger()) > 0) {
			val = plateauOfIsland;
		} else if (positionOnIsland.compareTo(p1.toBigInteger()) <= 0) {
			val = new AffineFunction(k1, BigDecimal.valueOf(plateauOfIsland), p1, absoluteMountainHeight).applyAsDouble(positionOnIsland);
		} else if (positionOnIsland.compareTo(k2.toBigInteger()) <= 0) {
			val = new AffineFunction(p1, absoluteMountainHeight, k2, absolutePlateauHeight).applyAsDouble(positionOnIsland);
		} else if (positionOnIsland.compareTo(median.toBigInteger()) <= 0) {
			val = new AffineFunction(k2, absolutePlateauHeight, median, absoluteAbyssDepth).applyAsDouble(positionOnIsland);
		} else if (positionOnIsland.compareTo(k3.toBigInteger()) <= 0) {
			val = new AffineFunction(median, absoluteAbyssDepth, k3, absolutePlateauHeight).applyAsDouble(positionOnIsland);
		} else if (positionOnIsland.compareTo(p2.toBigInteger()) <= 0) {
			val = new AffineFunction(k3, absolutePlateauHeight, p2, absoluteMountainHeight).applyAsDouble(positionOnIsland);
		} else if (positionOnIsland.compareTo(k4.toBigInteger()) <= 0) {
			val = new AffineFunction(p2, absoluteMountainHeight, k4, absolutePlateauHeight).applyAsDouble(positionOnIsland);
		} else {
			throw new IllegalStateException("This case should never occur!");
		}
		return val;
	}

	@Override
	public double getMinimumAchievable() {
		return this.minimumAchievable;
	}

	@Override
	public boolean isPathToTreasureIsland(final ILabeledPath<ITransparentTreeNode, Integer> path) {
		return this.indicesOfIslands.contains(this.getIsland(path));
	}
}
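/*
 * Sketch of the value profile over the positions of one island, as produced by the
 * piecewise-affine branches in evaluate (k1..k4 are the kinks, p1/p2 the peaks, lower is better):
 * plateau up to k1, rise to plateau + mountainHeight at p1, back down to the plateau at k2,
 * descent to plateau - abyssDepth at the median, ascent to the plateau at k3, rise to the
 * second peak p2, back to the plateau at k4, and plateau again afterwards.
 */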
0
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/treasuremodels/islands
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/treasuremodels/islands/funnel/DominatedFunnelTreasureModel.java
package ai.libs.jaicore.search.syntheticgraphs.treasuremodels.islands.funnel;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.MathContext;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Random;
import java.util.Set;

import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.PathEvaluationException;
import org.api4.java.datastructure.graph.ILabeledPath;

import ai.libs.jaicore.search.syntheticgraphs.graphmodels.ITransparentTreeNode;
import ai.libs.jaicore.search.syntheticgraphs.islandmodels.IIslandModel;
import ai.libs.jaicore.search.syntheticgraphs.treasuremodels.islands.AIslandTreasureModel;

public class DominatedFunnelTreasureModel extends AIslandTreasureModel {

	private final Random random;
	private final long seed;
	private final int numberOfTreasureIslands = 1;

	private final double plateauMin = 0.5;
	private final double maxPlateauAdvantageOfSubOptimals = 0.2;
	private double bestPlateauOfTreasures;
	private final double relativeInnerWidth = 0.1;
	private final double absSlopeOfInnerPlateau = 0.001;
	private final double relativeFunnelWidth = Math.pow(10, -10);

	private final Set<BigInteger> indicesOfTreasureIslands = new HashSet<>();
	private final Map<BigInteger, Double> plateausOfIslands = new HashMap<>();

	public DominatedFunnelTreasureModel(final IIslandModel islandModel, final Random random) {
		super(islandModel);
		this.random = random;
		this.seed = random.nextLong();
	}

	private void distributeTreasures() {
		Random localRandom = new Random(this.seed);
		this.bestPlateauOfTreasures = Double.MAX_VALUE;

		/* determine indices of treasure island(s) */
		while (this.indicesOfTreasureIslands.size() < this.numberOfTreasureIslands) {
			BigInteger newTreasureIsland;
			do {
				newTreasureIsland = new BigInteger(this.getIslandModel().getNumberOfIslands().bitLength(), localRandom);
			} while (newTreasureIsland.compareTo(this.getIslandModel().getNumberOfIslands()) >= 0);
			this.indicesOfTreasureIslands.add(newTreasureIsland);
		}

		/* compute the quality of the treasure islands */
		for (BigInteger island : this.indicesOfTreasureIslands) {
			double plateauOfThisIsland = this.plateauMin + (.9 - this.plateauMin) * localRandom.nextDouble();
			this.plateausOfIslands.put(island, plateauOfThisIsland);
			this.bestPlateauOfTreasures = Math.min(this.bestPlateauOfTreasures, plateauOfThisIsland);
		}
		this.logger.info("Treasure plateaus: {}. Treasure island: {}", this.getInnerPlateauOfTreasureIsland(this.bestPlateauOfTreasures), this.indicesOfTreasureIslands);
	}

	@Override
	public Double evaluate(final ILabeledPath<ITransparentTreeNode, Integer> path) throws PathEvaluationException, InterruptedException {
		if (this.indicesOfTreasureIslands.isEmpty()) {
			this.getIslandModel().setRootNode(path.getRoot());
			this.distributeTreasures();
		}
		BigInteger positionOnIsland = this.getPositionOnIsland(path);
		BigInteger island = this.getIsland(path);
		if (!this.plateausOfIslands.containsKey(island)) {
			this.plateausOfIslands.put(island, this.bestPlateauOfTreasures - this.maxPlateauAdvantageOfSubOptimals * (.5 - new Random(path.hashCode() + this.seed).nextDouble()));
		}
		double plateauOfIsland = this.plateausOfIslands.get(island);

		/* if this is not a treasure island, just return the plateau value */
		if (!this.indicesOfTreasureIslands.contains(island)) {
			return plateauOfIsland;
		}

		/* compute the relative position of the solution on the island; the island *size* is needed here (not the island index) */
		BigInteger islandSize = this.getIslandSize(path);
		if (positionOnIsland.compareTo(islandSize) > 0) {
			throw new IllegalStateException("Position on island cannot be greater than the island itself.");
		}
		double relativePositionOnIsland = new BigDecimal(positionOnIsland).divide(new BigDecimal(islandSize), MathContext.DECIMAL64).doubleValue(); // bounded precision avoids an ArithmeticException for non-terminating fractions

		/* if the position is not in the inner part, return the plateau value */
		double massOfBadPlateau = 1 - this.relativeInnerWidth;
		if (relativePositionOnIsland < massOfBadPlateau / 2 || (1 - relativePositionOnIsland) < massOfBadPlateau / 2) {
			return plateauOfIsland;
		}

		/* if the position is in the inner part but not within the funnel, return the good plateau */
		double marginOfFunnel = (1 - this.relativeFunnelWidth) / 2;
		if (relativePositionOnIsland < marginOfFunnel || (1 - relativePositionOnIsland) < marginOfFunnel) {
			double relativePositionInInnerPlateau = (relativePositionOnIsland - massOfBadPlateau / 2) / this.relativeInnerWidth;
			if (relativePositionInInnerPlateau > 1) {
				throw new IllegalStateException();
			}
			double distanceToPlateauBorder = relativePositionInInnerPlateau < .5 ? relativePositionInInnerPlateau : (1 - relativePositionInInnerPlateau);
			double plateauMax = this.getInnerPlateauOfTreasureIsland(this.bestPlateauOfTreasures);
			return plateauMax - this.absSlopeOfInnerPlateau * distanceToPlateauBorder;
		}
		return this.random.nextDouble() * .1;
	}

	private double getInnerPlateauOfTreasureIsland(final double nivel) {
		return nivel - .5 * this.maxPlateauAdvantageOfSubOptimals - 0.01;
	}

	@Override
	public double getMinimumAchievable() {
		return 0.0;
	}

	@Override
	public boolean isPathToTreasureIsland(final ILabeledPath<ITransparentTreeNode, Integer> path) {
		return this.indicesOfTreasureIslands.contains(this.getIsland(path));
	}
}
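/*
 * Profile of a treasure island in this model: an outer plateau (the island's base value),
 * an inner plateau of relative width 0.1 whose value lies slightly below even the best
 * non-treasure plateaus, and an extremely narrow funnel (relative width 1e-10) at the
 * centre whose values are drawn uniformly from [0, 0.1) - so the true optimum sits in a
 * region that is almost impossible to hit by chance.
 */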
0
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/treasuremodels/islands
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/treasuremodels/islands/funnel/FunnelTreasureModel.java
package ai.libs.jaicore.search.syntheticgraphs.treasuremodels.islands.funnel;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.MathContext;
import java.math.RoundingMode;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Random;
import java.util.Set;

import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.PathEvaluationException;
import org.api4.java.datastructure.graph.ILabeledPath;

import ai.libs.jaicore.math.linearalgebra.AffineFunction;
import ai.libs.jaicore.search.syntheticgraphs.graphmodels.ITransparentTreeNode;
import ai.libs.jaicore.search.syntheticgraphs.islandmodels.IIslandModel;
import ai.libs.jaicore.search.syntheticgraphs.treasuremodels.islands.AIslandTreasureModel;

public class FunnelTreasureModel extends AIslandTreasureModel {

	private final int numberOfTreasureIslands;
	private final Set<BigInteger> indicesOfIslands = new HashSet<>();
	private final int seed;
	private final double plateauMinForTreasures;
	private final double plateauMaxForTreasures;
	private final double plateauMinForNonTreasures;
	private final double plateauMaxForNonTreasures;
	private final double plateauSizes; // portion of the island that is plateau
	private final double funnelDepth;

	private double minimumAchievable = Double.MAX_VALUE;
	private final Map<BigInteger, Double> plateausOfIslands = new HashMap<>();

	public FunnelTreasureModel(final IIslandModel islandModel, final int numberOfTreasureIslands, final int seed, final double plateauMinForTreasures, final double plateauMaxForTreasures,
			final double plateauMinForNonTreasures, final double plateauMaxForNonTreasures, final double plateauSizes, final double funnelDepth) {
		super(islandModel);
		this.numberOfTreasureIslands = numberOfTreasureIslands;
		this.seed = seed;
		this.plateauMinForTreasures = plateauMinForTreasures;
		this.plateauMaxForTreasures = plateauMaxForTreasures;
		this.plateauMinForNonTreasures = plateauMinForNonTreasures;
		this.plateauMaxForNonTreasures = plateauMaxForNonTreasures;
		this.plateauSizes = plateauSizes;
		this.funnelDepth = funnelDepth;
	}

	public FunnelTreasureModel(final IIslandModel islandModel, final int numberOfTreasureIslands, final Random random) {
		this(islandModel, numberOfTreasureIslands, random.nextInt(), 0.1, 0.15, 0.7, 0.95, 0.5, .1);
	}

	private void distributeTreasures() {
		Random random = new Random(this.seed);
		while (this.indicesOfIslands.size() < this.numberOfTreasureIslands) {
			BigInteger newTreasureIsland;
			do {
				newTreasureIsland = new BigInteger(this.getIslandModel().getNumberOfIslands().bitLength(), random);
			} while (newTreasureIsland.compareTo(this.getIslandModel().getNumberOfIslands()) >= 0);
			this.indicesOfIslands.add(newTreasureIsland);
		}
		for (BigInteger island : this.indicesOfIslands) {
			double plateauOfThisIsland = this.plateauMinForTreasures + (this.plateauMaxForTreasures - this.plateauMinForTreasures) * random.nextDouble();
			this.plateausOfIslands.put(island, plateauOfThisIsland);
			this.minimumAchievable = Math.min(this.minimumAchievable, plateauOfThisIsland - this.funnelDepth);
		}
	}

	@Override
	public Double evaluate(final ILabeledPath<ITransparentTreeNode, Integer> path) throws PathEvaluationException, InterruptedException {
		if (this.indicesOfIslands.isEmpty()) {
			this.getIslandModel().setRootNode(path.getRoot());
			this.distributeTreasures();
		}
		BigInteger positionOnIsland = this.getPositionOnIsland(path);
		BigInteger island = this.getIsland(path);
		if (!this.plateausOfIslands.containsKey(island)) {
			this.plateausOfIslands.put(island, this.plateauMinForNonTreasures + (this.plateauMaxForNonTreasures - this.plateauMinForNonTreasures) * new Random(path.hashCode() + (long) this.seed).nextDouble());
		}
		double plateauOfIsland = this.plateausOfIslands.get(island);

		/* compute important island positions for distribution; the island *size* is needed here (not the island index) */
		BigInteger islandSize = this.getIslandSize(path);
		if (positionOnIsland.compareTo(islandSize) > 0) {
			throw new IllegalStateException("Position on island cannot be greater than the island itself.");
		}
		BigDecimal islandSizeAsDecimal = new BigDecimal(islandSize);
		BigDecimal c1;
		BigDecimal c2;
		BigDecimal median;
		if (islandSize.remainder(BigInteger.valueOf(2)).equals(BigInteger.ZERO)) {
			c1 = islandSizeAsDecimal.multiply(BigDecimal.valueOf(this.plateauSizes / 2.0)).round(new MathContext(1, RoundingMode.CEILING));
			c2 = islandSizeAsDecimal.subtract(c1).round(new MathContext(1, RoundingMode.FLOOR));
			median = islandSizeAsDecimal.divide(BigDecimal.valueOf(2));
		} else {
			c1 = islandSizeAsDecimal.multiply(BigDecimal.valueOf(this.plateauSizes / 2.0)).round(new MathContext(1, RoundingMode.FLOOR));
			c2 = islandSizeAsDecimal.subtract(islandSizeAsDecimal.multiply(BigDecimal.valueOf(this.plateauSizes / 2.0))).round(new MathContext(1, RoundingMode.CEILING)).add(BigDecimal.ONE);
			median = islandSizeAsDecimal.add(BigDecimal.ONE).divide(BigDecimal.valueOf(2));
		}

		double val;
		if (positionOnIsland.compareTo(c1.toBigInteger()) <= 0 || positionOnIsland.compareTo(c2.toBigInteger()) > 0) {
			val = plateauOfIsland;
		} else if (positionOnIsland.compareTo(median.toBigInteger()) <= 0) {
			val = new AffineFunction(c1, BigDecimal.valueOf(plateauOfIsland), median, BigDecimal.valueOf(plateauOfIsland).subtract(BigDecimal.valueOf(this.funnelDepth))).applyAsDouble(positionOnIsland);
		} else {
			val = new AffineFunction(c2, BigDecimal.valueOf(plateauOfIsland), median, BigDecimal.valueOf(plateauOfIsland).subtract(BigDecimal.valueOf(this.funnelDepth))).applyAsDouble(positionOnIsland);
		}
		return val;
	}

	@Override
	public double getMinimumAchievable() {
		return this.minimumAchievable;
	}

	@Override
	public boolean isPathToTreasureIsland(final ILabeledPath<ITransparentTreeNode, Integer> path) {
		if (this.indicesOfIslands.isEmpty()) {
			this.getIslandModel().setRootNode(path.getRoot());
			this.distributeTreasures();
		}
		return this.indicesOfIslands.contains(this.getIsland(path));
	}
}
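/*
 * Value profile over one island in this model: plateau up to c1, linear descent to
 * plateau - funnelDepth at the median, linear ascent back to the plateau at c2, and
 * plateau again afterwards - a single symmetric funnel per island.
 */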
0
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/treasuremodels/islands
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/treasuremodels/islands/funnel/RelativeFunnelTreasureModel.java
package ai.libs.jaicore.search.syntheticgraphs.treasuremodels.islands.funnel;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Random;
import java.util.Set;

import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.PathEvaluationException;
import org.api4.java.datastructure.graph.ILabeledPath;

import ai.libs.jaicore.math.linearalgebra.AffineFunction;
import ai.libs.jaicore.search.syntheticgraphs.graphmodels.ITransparentTreeNode;
import ai.libs.jaicore.search.syntheticgraphs.islandmodels.IIslandModel;
import ai.libs.jaicore.search.syntheticgraphs.treasuremodels.islands.AIslandTreasureModel;

public class RelativeFunnelTreasureModel extends AIslandTreasureModel {

	private final int numberOfTreasureIslands;
	private final Set<BigInteger> indicesOfIslands = new HashSet<>();
	private final int seed;
	private final double plateauMinForTreasures;
	private final double plateauMaxForTreasures;
	private final double plateauMinForNonTreasures;
	private final double plateauMaxForNonTreasures;
	private final double plateauSizes;

	private double minimumAchievable = Double.MAX_VALUE;
	private final Map<BigInteger, Double> plateausOfIslands = new HashMap<>();

	public double getDepthOfFunnel(final double plateau) {
		return 6.29 * Math.pow(plateau, 1.25) - 5.29 * plateau;
	}

	public double getMinimumOfFunnel(final double plateau) {
		return plateau - this.getDepthOfFunnel(plateau);
	}

	public RelativeFunnelTreasureModel(final IIslandModel islandModel, final int numberOfTreasureIslands, final int seed, final double plateauMinForTreasures, final double plateauMaxForTreasures,
			final double plateauMinForNonTreasures, final double plateauMaxForNonTreasures, final double plateauSizes) {
		super(islandModel);
		this.numberOfTreasureIslands = numberOfTreasureIslands;
		this.seed = seed;
		this.plateauMinForTreasures = plateauMinForTreasures;
		this.plateauMaxForTreasures = plateauMaxForTreasures;
		this.plateauMinForNonTreasures = plateauMinForNonTreasures;
		this.plateauMaxForNonTreasures = plateauMaxForNonTreasures;
		this.plateauSizes = plateauSizes;
	}

	public RelativeFunnelTreasureModel(final IIslandModel islandModel, final int numberOfTreasureIslands, final Random random) {
		this(islandModel, numberOfTreasureIslands, random.nextInt(), .8, 1, .5, .8, .8);
	}

	private void distributeTreasures() {
		Random random = new Random(this.seed);
		while (this.indicesOfIslands.size() < this.numberOfTreasureIslands) {
			BigInteger newTreasureIsland;
			do {
				newTreasureIsland = new BigInteger(this.getIslandModel().getNumberOfIslands().bitLength(), random);
			} while (newTreasureIsland.compareTo(this.getIslandModel().getNumberOfIslands()) >= 0);
			this.indicesOfIslands.add(newTreasureIsland);
		}
		for (BigInteger island : this.indicesOfIslands) {
			double plateauOfThisIsland = this.plateauMinForTreasures + (this.plateauMaxForTreasures - this.plateauMinForTreasures) * random.nextDouble();
			this.plateausOfIslands.put(island, plateauOfThisIsland);
			this.minimumAchievable = Math.min(this.minimumAchievable, this.getMinimumOfFunnel(plateauOfThisIsland));
		}
	}

	@Override
	public Double evaluate(final ILabeledPath<ITransparentTreeNode, Integer> path) throws PathEvaluationException, InterruptedException {
		if (this.indicesOfIslands.isEmpty()) {
			this.getIslandModel().setRootNode(path.getRoot());
			this.distributeTreasures();
		}
		BigInteger positionOnIsland = this.getPositionOnIsland(path);
		BigInteger island = this.getIsland(path);
		if (!this.plateausOfIslands.containsKey(island)) {
			this.plateausOfIslands.put(island, this.plateauMinForNonTreasures + (this.plateauMaxForNonTreasures - this.plateauMinForNonTreasures) * new Random(path.hashCode() * (long) this.seed).nextDouble());
		}
		double plateauOfIsland = this.plateausOfIslands.get(island);

		/* compute important island positions for distribution */
		BigInteger islandSize = this.getIslandSize(path);
		if (positionOnIsland.compareTo(islandSize) > 0) {
			throw new IllegalStateException("Position on island cannot be greater than the island itself.");
		}
		BigDecimal islandSizeAsDecimal = new BigDecimal(islandSize);
		BigDecimal abyssSegment = islandSizeAsDecimal.multiply(BigDecimal.valueOf((1 - this.plateauSizes) / 2));
		BigDecimal c1;
		BigDecimal c2;
		BigDecimal median;
		c1 = islandSizeAsDecimal.multiply(BigDecimal.valueOf(this.plateauSizes / 2.0));
		median = c1.add(abyssSegment);
		c2 = median.add(abyssSegment);
		BigDecimal minimumOfFunnel = BigDecimal.valueOf(this.getMinimumOfFunnel(plateauOfIsland));

		/* now compute value for current position */
		double val;
		if (positionOnIsland.compareTo(c1.toBigInteger()) <= 0 || positionOnIsland.compareTo(c2.toBigInteger()) > 0) {
			val = plateauOfIsland;
		} else if (positionOnIsland.compareTo(median.toBigInteger()) <= 0) {
			val = new AffineFunction(c1, BigDecimal.valueOf(plateauOfIsland), median, minimumOfFunnel).applyAsDouble(positionOnIsland);
		} else {
			val = new AffineFunction(c2, BigDecimal.valueOf(plateauOfIsland), median, minimumOfFunnel).applyAsDouble(positionOnIsland);
		}
		return val;
	}

	@Override
	public double getMinimumAchievable() {
		return this.minimumAchievable;
	}

	@Override
	public boolean isPathToTreasureIsland(final ILabeledPath<ITransparentTreeNode, Integer> path) {
		return this.indicesOfIslands.contains(this.getIsland(path));
	}
}
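/*
 * Worked example for the funnel depth: getDepthOfFunnel(p) = 6.29 * p^1.25 - 5.29 * p.
 * For p = 1.0 this yields 6.29 - 5.29 = 1.0, so getMinimumOfFunnel(1.0) = 0.0; for
 * p = 0.5, 6.29 * 0.5^1.25 is roughly 2.645 and 5.29 * 0.5 = 2.645, so the depth is
 * approximately 0 and the funnel of a low plateau is almost flat. Higher plateaus thus
 * hide proportionally deeper funnels.
 */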
0
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/treasuremodels/islands
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/treasuremodels/islands/noisymean/ATreasureMeanFunction.java
package ai.libs.jaicore.search.syntheticgraphs.treasuremodels.islands.noisymean;

import java.math.BigInteger;
import java.util.function.Function;

import ai.libs.jaicore.search.syntheticgraphs.islandmodels.IIslandModel;

public abstract class ATreasureMeanFunction implements Function<BigInteger, Double> {

	private final IIslandModel islandModel;
	private final long numberOfTreasures;

	public ATreasureMeanFunction(final IIslandModel islandModel, final long numberOfTreasures) {
		super();
		this.islandModel = islandModel;
		this.numberOfTreasures = numberOfTreasures;
	}

	public BigInteger getTotalNumberOfIslands() {
		return this.islandModel.getNumberOfIslands();
	}

	public long getNumberOfTreasures() {
		return this.numberOfTreasures;
	}
}
0
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/treasuremodels/islands
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/treasuremodels/islands/noisymean/ChaoticMeansTreasureModel.java
package ai.libs.jaicore.search.syntheticgraphs.treasuremodels.islands.noisymean;

import java.math.BigInteger;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Random;
import java.util.Set;

import org.api4.java.ai.graphsearch.problem.IPathSearchInput;
import org.api4.java.algorithm.exceptions.AlgorithmException;
import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException;
import org.api4.java.algorithm.exceptions.AlgorithmTimeoutedException;
import org.api4.java.datastructure.graph.ILabeledPath;

import ai.libs.jaicore.search.algorithms.standard.random.RandomSearch;
import ai.libs.jaicore.search.syntheticgraphs.graphmodels.ITransparentTreeNode;
import ai.libs.jaicore.search.syntheticgraphs.islandmodels.IIslandModel;

/**
 * In this model, the means for the islands do not follow any pattern but are just drawn randomly.
 *
 * @author fmohr
 *
 * @param <N>
 */
public class ChaoticMeansTreasureModel extends NoisyMeanTreasureModel {

	private final int numberOfIslandsWithTreasure;
	private final Map<BigInteger, Double> means = new HashMap<>();
	private final Random random;
	private final Set<BigInteger> indicesOfIslands = new HashSet<>();
	private boolean treasuresDistributed = false;
	private IPathSearchInput<ITransparentTreeNode, Integer> graphSearchInput;

	public ChaoticMeansTreasureModel(final int numberOfIslandsWithTreasure, final IIslandModel islandModel, final long seed) {
		this(numberOfIslandsWithTreasure, islandModel, new Random(seed));
	}

	public ChaoticMeansTreasureModel(final int numberOfIslandsWithTreasure, final IIslandModel islandModel, final Random r) {
		super(islandModel);
		this.numberOfIslandsWithTreasure = numberOfIslandsWithTreasure;
		this.random = r;
	}

	private void distributeTreasures() throws AlgorithmTimeoutedException, InterruptedException, AlgorithmExecutionCanceledException, AlgorithmException {
		if (this.graphSearchInput == null) {
			throw new IllegalStateException("Cannot distribute treasures before graph generator has been set.");
		}
		this.logger.info("Start treasure distribution. Will choose {} treasure islands.", this.numberOfIslandsWithTreasure);
		RandomSearch<ITransparentTreeNode, Integer> rs = new RandomSearch<>(this.graphSearchInput);
		while (this.indicesOfIslands.size() < this.numberOfIslandsWithTreasure) {
			ILabeledPath<ITransparentTreeNode, Integer> treasurePath = rs.nextSolutionCandidate();
			this.indicesOfIslands.add(this.getIslandModel().getIsland(treasurePath));
		}
		if (this.indicesOfIslands.size() != this.numberOfIslandsWithTreasure) {
			throw new IllegalStateException("Treasure distribution failed! Distributed " + this.indicesOfIslands.size() + " instead of " + this.numberOfIslandsWithTreasure + " treasures.");
		}
		this.logger.info("Defined {} treasure islands: {}", this.numberOfIslandsWithTreasure, this.indicesOfIslands);
		this.treasuresDistributed = true;
	}

	@Override
	public double getMeanOfIsland(final BigInteger island) {
		if (this.indicesOfIslands.isEmpty()) {
			try {
				this.distributeTreasures();
			} catch (AlgorithmTimeoutedException | AlgorithmExecutionCanceledException | AlgorithmException e) {
				return Double.NaN;
			} catch (InterruptedException e) {
				Thread.currentThread().interrupt();
				return Double.NaN;
			}
		}
		final Random r1 = new Random(this.random.nextInt() + (long) island.intValue()); // this randomness includes the random source of the generator
		return this.means.computeIfAbsent(island, p -> this.isTreasureIsland(p) ? 1 + r1.nextDouble() * 5 : 20 + r1.nextDouble() * 85);
	}

	public boolean isTreasureIsland(final BigInteger island) {
		if (this.indicesOfIslands.isEmpty()) {
			try {
				this.distributeTreasures();
			} catch (AlgorithmTimeoutedException | AlgorithmExecutionCanceledException | AlgorithmException e) {
				return false;
			} catch (InterruptedException e) {
				Thread.currentThread().interrupt();
				return false;
			}
		}
		return this.indicesOfIslands.contains(island);
	}

	@Override
	public boolean isPathToTreasureIsland(final ILabeledPath<ITransparentTreeNode, Integer> path) {
		return this.isTreasureIsland(this.getIslandModel().getIsland(path));
	}

	public Collection<BigInteger> getTreasureIslands() {
		return Collections.unmodifiableCollection(this.indicesOfIslands);
	}

	@Override
	public double getMinimumAchievable() {
		throw new UnsupportedOperationException();
	}

	public boolean isTreasuresDistributed() {
		return this.treasuresDistributed;
	}

	public IPathSearchInput<ITransparentTreeNode, Integer> getGraphSearchInput() {
		return this.graphSearchInput;
	}

	public void setGraphSearchInput(final IPathSearchInput<ITransparentTreeNode, Integer> graphSearchInput) {
		this.graphSearchInput = graphSearchInput;
	}
}
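/*
 * Minimal wiring sketch, assuming the degenerated synthetic graph problem and the
 * equal-sized island model from this package; somePath is a hypothetical solution path.
 * The graph search input must be set before the first evaluation so that treasures can
 * be distributed via RandomSearch:
 *
 *   IIslandModel islands = new EqualSizedIslandsModel(BigInteger.valueOf(16));
 *   ChaoticMeansTreasureModel treasures = new ChaoticMeansTreasureModel(2, islands, 42L);
 *   treasures.setGraphSearchInput(new DegeneratedGraphSearchProblem(new Random(42), 1, 4, 6));
 *   double score = treasures.evaluate(somePath);
 */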
0
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/treasuremodels/islands
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/treasuremodels/islands/noisymean/LinkedTreasureIslandPathCostGenerator.java
package ai.libs.jaicore.search.syntheticgraphs.treasuremodels.islands.noisymean;

import java.math.BigInteger;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

import org.api4.java.datastructure.graph.ILabeledPath;

import ai.libs.jaicore.search.syntheticgraphs.graphmodels.ITransparentTreeNode;
import ai.libs.jaicore.search.syntheticgraphs.islandmodels.IIslandModel;

public class LinkedTreasureIslandPathCostGenerator extends NoisyMeanTreasureModel {

	private final Function<BigInteger, Double> meanFunction;
	private final Map<BigInteger, Double> explicitlyEvaluatedMeans = new HashMap<>();

	public LinkedTreasureIslandPathCostGenerator(final IIslandModel islandModel, final Function<BigInteger, Double> meanFunction) {
		super(islandModel);
		this.meanFunction = meanFunction;
	}

	@Override
	public double getMeanOfIsland(final BigInteger island) {
		return this.explicitlyEvaluatedMeans.computeIfAbsent(island, p -> this.meanFunction.apply(island));
	}

	@Override
	public double getMinimumAchievable() {
		throw new UnsupportedOperationException();
	}

	@Override
	public boolean isPathToTreasureIsland(final ILabeledPath<ITransparentTreeNode, Integer> path) {
		throw new UnsupportedOperationException();
	}
}
0
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/treasuremodels/islands
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/treasuremodels/islands/noisymean/NoisyMeanTreasureModel.java
package ai.libs.jaicore.search.syntheticgraphs.treasuremodels.islands.noisymean;

import java.math.BigInteger;
import java.util.Random;

import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.PathEvaluationException;
import org.api4.java.datastructure.graph.ILabeledPath;

import ai.libs.jaicore.search.syntheticgraphs.graphmodels.ITransparentTreeNode;
import ai.libs.jaicore.search.syntheticgraphs.islandmodels.IIslandModel;
import ai.libs.jaicore.search.syntheticgraphs.treasuremodels.ITreasureModel;
import ai.libs.jaicore.search.syntheticgraphs.treasuremodels.islands.AIslandTreasureModel;

/**
 * In this model, every island has a mean, and the scores are distributed closely around this mean.
 *
 * The mean itself is defined by a more concrete subclass.
 *
 * @author fmohr
 *
 * @param <N>
 */
public abstract class NoisyMeanTreasureModel extends AIslandTreasureModel implements ITreasureModel {

	public NoisyMeanTreasureModel(final IIslandModel islandModel) {
		super(islandModel);
	}

	public abstract double getMeanOfIsland(BigInteger island);

	@Override
	public Double evaluate(final ILabeledPath<ITransparentTreeNode, Integer> path) throws PathEvaluationException, InterruptedException {
		this.getIslandModel().setRootNode(path.getRoot());
		BigInteger island = this.getIslandModel().getIsland(path);
		double mean = this.getMeanOfIsland(island);
		double maxDeviationFactor = mean < 10 ? mean : Math.sqrt(mean);
		final Random r2 = new Random(path.hashCode());
		boolean add = r2.nextBoolean();
		double deviation = r2.nextDouble() * maxDeviationFactor * (add ? 1 : -1);
		double score = Math.max(0, mean + deviation);

		/* avoid that sub-optimal islands come into the region below 10 and vice versa */
		if (mean < 10) {
			score = Math.min(score, 9);
		} else {
			score = Math.max(11, score);
		}
		return score;
	}
}
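/*
 * Worked example of the banding in evaluate: for a treasure-like island with mean 5, the
 * maximum deviation factor is the mean itself (5), but the score is capped at 9; for a
 * non-treasure island with mean 20, the deviation factor is sqrt(20), roughly 4.47, and the
 * score is clamped to at least 11. Hence scores of low-mean islands stay strictly below 10
 * and scores of high-mean islands strictly above it.
 */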
0
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/treasuremodels/islands
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/syntheticgraphs/treasuremodels/islands/noisymean/ShiftedSineTreasureGenerator.java
package ai.libs.jaicore.search.syntheticgraphs.treasuremodels.islands.noisymean;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.MathContext;

import ai.libs.jaicore.search.syntheticgraphs.islandmodels.IIslandModel;

public class ShiftedSineTreasureGenerator extends ATreasureMeanFunction {

	private final double c; // width (in radians) of the window around PI/2 that is re-scaled by the shift
	private final double d; // amount by which positions are shifted at the borders of that window

	public ShiftedSineTreasureGenerator(final IIslandModel islandModel, final long numberOfTreasures, final double c, final double d) {
		super(islandModel, numberOfTreasures);
		this.c = c;
		this.d = d;
	}

	@Override
	public Double apply(final BigInteger t) {
		double max = this.getNumberOfTreasures() * 2 * Math.PI;
		double positionInInterval = new BigDecimal(t).multiply(BigDecimal.valueOf(max)).divide(BigDecimal.valueOf(this.getTotalNumberOfIslands().intValue()), MathContext.DECIMAL64).doubleValue(); // bounded precision avoids an ArithmeticException for non-terminating fractions
		int periodOffset = (int) Math.floor(positionInInterval / (2 * Math.PI));
		double positionInPeriod = positionInInterval % (2 * Math.PI);
		double shiftedPosition;
		if (positionInPeriod < 0.5 * (Math.PI - this.c)) {
			shiftedPosition = (1 - this.d / (0.5 * (Math.PI - this.c))) * positionInPeriod;
		} else if (positionInPeriod > 0.5 * (Math.PI + this.c)) {
			shiftedPosition = (1 - this.d / (2 * Math.PI - 0.5 * (Math.PI + this.c))) * positionInPeriod + (2 * Math.PI * this.d) / (2 * Math.PI - 0.5 * (Math.PI + this.c));
		} else {
			shiftedPosition = (1 + 2 * this.d / this.c) * positionInPeriod + this.d * ((this.c - Math.PI) / this.c - 1);
		}
		shiftedPosition += Math.PI * 2 * periodOffset;
		return (-1 * Math.sin(shiftedPosition) + 1) * 50; // this produces values in [0,100]
	}
}
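/*
 * Continuity check for the piecewise shift in apply: at positionInPeriod = 0.5 * (PI - c),
 * the first branch yields 0.5 * (PI - c) - d, and the middle branch yields the same value;
 * at positionInPeriod = 0.5 * (PI + c), the middle branch yields 0.5 * (PI + c) + d,
 * matching the third branch, which in turn maps 2 * PI to 2 * PI. So the window of width c
 * around PI/2 is stretched by 2 * d while the outer segments are compressed to keep the
 * period of 2 * PI intact, and the resulting scores always stay within [0, 100].
 */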
0
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/util/CycleDetectedResult.java
package ai.libs.jaicore.search.util;

import java.util.List;

public class CycleDetectedResult<N> extends SanityCheckResult {

	private final List<N> wholePath;
	private final N duplicateNode;

	public CycleDetectedResult(List<N> wholePath, N duplicateNode) {
		super();
		this.wholePath = wholePath;
		this.duplicateNode = duplicateNode;
	}

	public List<N> getWholePath() {
		return wholePath;
	}

	public N getDuplicateNode() {
		return duplicateNode;
	}

	@Override
	public String toString() {
		return "CycleDetectedResult [wholePath=" + wholePath + ", duplicateNode=" + duplicateNode + "]";
	}
}
0
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/util/DeadEndDetectedResult.java
package ai.libs.jaicore.search.util;

public class DeadEndDetectedResult<N> extends SanityCheckResult {

	private final N deadEnd;

	public DeadEndDetectedResult(N deadEnd) {
		super();
		this.deadEnd = deadEnd;
	}

	public N getDeadEnd() {
		return deadEnd;
	}

	@Override
	public String toString() {
		return "DeadEndDetectedResult [deadEnd=" + deadEnd + "]";
	}
}
0
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/util/GraphSanityChecker.java
package ai.libs.jaicore.search.util;

import java.util.List;
import java.util.Stack;

import org.api4.java.ai.graphsearch.problem.IPathSearchInput;
import org.api4.java.ai.graphsearch.problem.implicit.graphgenerator.IPathGoalTester;
import org.api4.java.algorithm.events.IAlgorithmEvent;
import org.api4.java.datastructure.graph.implicit.INewNodeDescription;
import org.api4.java.datastructure.graph.implicit.ISingleRootGenerator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ai.libs.jaicore.graphvisualizer.events.graph.GraphInitializedEvent;
import ai.libs.jaicore.graphvisualizer.events.graph.NodeAddedEvent;
import ai.libs.jaicore.graphvisualizer.events.graph.NodeTypeSwitchEvent;
import ai.libs.jaicore.search.core.interfaces.AOptimalPathInORGraphSearch;
import ai.libs.jaicore.search.model.travesaltree.BackPointerPath;

public class GraphSanityChecker<N, A> extends AOptimalPathInORGraphSearch<IPathSearchInput<N, A>, N, A, Double> {

	private Logger logger = LoggerFactory.getLogger(GraphSanityChecker.class);
	private String loggerName;

	private SanityCheckResult sanityCheckResult;
	private final int maxNodesToExpand;
	private boolean detectCycles = true;
	private boolean detectDeadEnds = true;

	public GraphSanityChecker(final IPathSearchInput<N, A> problem, final int maxNodesToExpand) {
		super(problem);
		this.maxNodesToExpand = maxNodesToExpand;
	}

	@Override
	public IAlgorithmEvent nextWithException() throws InterruptedException {
		switch (this.getState()) {
		case CREATED:
			return this.activate();
		case ACTIVE:
			int expanded = 0;
			Stack<BackPointerPath<N, A, ?>> open = new Stack<>();
			N root = ((ISingleRootGenerator<N>) this.getGraphGenerator().getRootGenerator()).getRoot();
			IPathGoalTester<N, A> goalTester = this.getGoalTester();
			open.push(new BackPointerPath<>(null, root, null));
			this.post(new GraphInitializedEvent<N>(this, root));
			while (!open.isEmpty() && expanded < this.maxNodesToExpand) {
				BackPointerPath<N, A, ?> node = open.pop();
				if (!node.isGoal()) {
					this.post(new NodeTypeSwitchEvent<>(this, node, "or_closed"));
				}
				expanded++;
				List<INewNodeDescription<N, A>> successors = this.getGraphGenerator().getSuccessorGenerator().generateSuccessors(node.getHead());
				if (this.detectDeadEnds && successors.isEmpty() && !node.isGoal()) {
					this.sanityCheckResult = new DeadEndDetectedResult<N>(node.getHead());
					break;
				}
				for (INewNodeDescription<N, A> successor : successors) {
					if (this.detectCycles && node.getNodes().contains(successor.getTo())) {
						List<N> path = node.getNodes();
						path.add(successor.getTo());
						this.sanityCheckResult = new CycleDetectedResult<N>(path, node.getHead());
						break;
					}
					BackPointerPath<N, A, ?> newNode = new BackPointerPath<>(node, successor.getTo(), successor.getArcLabel());
					newNode.setGoal(goalTester.isGoal(newNode));
					open.add(newNode);
					this.post(new NodeAddedEvent<N>(this, node.getHead(), successor.getTo(), newNode.isGoal() ? "or_solution" : "or_open"));
				}
				if (this.sanityCheckResult != null) {
					break;
				}
				if (expanded % 100 == 0 || expanded == this.maxNodesToExpand) {
					this.logger.debug("Expanded {}/{} nodes.", expanded, this.maxNodesToExpand);
				}
			}
			this.shutdown();
			return this.terminate();
		default:
			throw new IllegalStateException("Cannot do anything in state " + this.getState());
		}
	}

	public SanityCheckResult getSanityCheck() {
		return this.sanityCheckResult != null ? this.sanityCheckResult : new GraphSeemsSaneResult();
	}

	@Override
	public String getLoggerName() {
		return this.loggerName;
	}

	@Override
	public void setLoggerName(final String name) {
		this.logger.info("Switching logger from {} to {}", this.logger.getName(), name);
		this.loggerName = name;
		this.logger = LoggerFactory.getLogger(name);
		this.logger.info("Activated logger {} with name {}", name, this.logger.getName());
		super.setLoggerName(this.loggerName + "._orgraphsearch");
	}

	public boolean isDetectCycles() {
		return this.detectCycles;
	}

	public void setDetectCycles(final boolean detectCycles) {
		this.detectCycles = detectCycles;
	}

	public boolean isDetectDeadEnds() {
		return this.detectDeadEnds;
	}

	public void setDetectDeadEnds(final boolean detectDeadEnds) {
		this.detectDeadEnds = detectDeadEnds;
	}
}
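/*
 * Minimal usage sketch, assuming some IPathSearchInput<N, A> instance named problem and
 * that the inherited IAlgorithm call() drives nextWithException() to completion (an
 * assumption about the surrounding algorithm framework, not shown in this file):
 *
 *   GraphSanityChecker<N, A> checker = new GraphSanityChecker<>(problem, 10000);
 *   checker.setDetectCycles(true);
 *   checker.call();
 *   SanityCheckResult result = checker.getSanityCheck();
 *   // result is a CycleDetectedResult, DeadEndDetectedResult, or GraphSeemsSaneResult
 */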
0
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/util/GraphSeemsSaneResult.java
package ai.libs.jaicore.search.util;

public class GraphSeemsSaneResult extends SanityCheckResult {

}
0
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search
java-sources/ai/libs/jaicore-search/0.2.7/ai/libs/jaicore/search/util/SanityCheckResult.java
package ai.libs.jaicore.search.util;

public class SanityCheckResult {

}
0
java-sources/ai/libs/mlplan/0.2.1/ai/libs/mlplan
java-sources/ai/libs/mlplan/0.2.1/ai/libs/mlplan/core/AbstractMLPlanBuilder.java
package ai.libs.mlplan.core; import java.io.File; import java.io.IOException; import java.util.Collection; import java.util.LinkedList; import java.util.Map; import java.util.Objects; import java.util.concurrent.TimeUnit; import java.util.function.Predicate; import org.aeonbits.owner.ConfigFactory; import org.api4.java.ai.graphsearch.problem.IOptimalPathInORGraphSearchFactory; import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.IPathEvaluator; import org.api4.java.ai.ml.core.dataset.splitter.IFoldSizeConfigurableRandomDatasetSplitter; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance; import org.api4.java.ai.ml.core.evaluation.supervised.loss.IDeterministicPredictionPerformanceMeasure; import org.api4.java.ai.ml.core.learner.ISupervisedLearner; import org.api4.java.algorithm.Timeout; import org.api4.java.common.control.ILoggingCustomizable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ai.libs.hasco.model.Component; import ai.libs.hasco.model.Parameter; import ai.libs.hasco.model.ParameterRefinementConfiguration; import ai.libs.hasco.serialization.ComponentLoader; import ai.libs.hasco.variants.forwarddecomposition.HASCOViaFDFactory; import ai.libs.hasco.variants.forwarddecomposition.twophase.HASCOWithRandomCompletionsConfig; import ai.libs.jaicore.basic.FileUtil; import ai.libs.jaicore.basic.IOwnerBasedAlgorithmConfig; import ai.libs.jaicore.basic.IOwnerBasedRandomConfig; import ai.libs.jaicore.basic.algorithm.reduction.AlgorithmicProblemReduction; import ai.libs.jaicore.basic.reconstruction.ReconstructionUtil; import ai.libs.jaicore.ml.classification.loss.dataset.EClassificationPerformanceMeasure; import ai.libs.jaicore.ml.core.evaluation.evaluator.factory.ISupervisedLearnerEvaluatorFactory; import ai.libs.jaicore.ml.core.evaluation.evaluator.factory.MonteCarloCrossValidationEvaluatorFactory; import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode; import ai.libs.jaicore.search.algorithms.standard.bestfirst.BestFirstFactory; import ai.libs.jaicore.search.algorithms.standard.bestfirst.StandardBestFirstFactory; import ai.libs.jaicore.search.algorithms.standard.bestfirst.nodeevaluation.AlternativeNodeEvaluator; import ai.libs.jaicore.search.probleminputs.GraphSearchWithPathEvaluationsInput; import ai.libs.jaicore.search.problemtransformers.GraphSearchProblemInputToGraphSearchWithSubpathEvaluationInputTransformerViaRDFS; import ai.libs.mlplan.multiclass.MLPlanClassifierConfig; /** * The MLPlanBuilder helps to easily configure and initialize ML-Plan with specific parameter settings. * For convenient use, the MLPlanBuilder also offers methods for initializing ML-Plan with default * configuration to use ML-Plan for single label classification in combination with WEKA or scikit-learn * or for multi-label classification in combination with MEKA and consequently with WEKA (for baselearners * of multi-label reduction strategies). * * @author mwever, fmohr */ public abstract class AbstractMLPlanBuilder<L extends ISupervisedLearner<ILabeledInstance, ILabeledDataset<? 
extends ILabeledInstance>>, B extends AbstractMLPlanBuilder<L, B>> implements IMLPlanBuilder<L, B>, ILoggingCustomizable { /* Logging */ private Logger logger = LoggerFactory.getLogger(AbstractMLPlanBuilder.class); private String loggerName = AbstractMLPlanBuilder.class.getName(); private static final String RES_ALGORITHM_CONFIG = "mlplan/mlplan.properties"; private static final String FS_ALGORITHM_CONFIG = "conf/mlplan.properties"; protected static final int DEFAULT_SEARCH_NUM_MC_ITERATIONS = 5; protected static final double DEFAULT_SEARCH_TRAIN_FOLD_SIZE = 0.7; protected static final int DEFAULT_SELECTION_NUM_MC_ITERATIONS = 5; protected static final double DEFAULT_SELECTION_TRAIN_FOLD_SIZE = 0.7; protected static final IDeterministicPredictionPerformanceMeasure<Object, Object> DEFAULT_PERFORMANCE_MEASURE = EClassificationPerformanceMeasure.ERRORRATE; /* Default configuration values */ private static final File DEF_ALGORITHM_CONFIG = FileUtil.getExistingFileWithHighestPriority(RES_ALGORITHM_CONFIG, FS_ALGORITHM_CONFIG); /* Builder (self) status variables */ private boolean factoryPreparedWithData = false; /* Data for initializing ML-Plan */ private MLPlanClassifierConfig algorithmConfig; @SuppressWarnings("rawtypes") private HASCOViaFDFactory hascoFactory = new HASCOViaFDFactory<GraphSearchWithPathEvaluationsInput<TFDNode, String, Double>, Double>(); private Predicate<TFDNode> priorizingPredicate = null; private File searchSpaceFile; private String requestedHASCOInterface; private ILearnerFactory<L> learnerFactory; /* Node Evaluation */ private IPathEvaluator<TFDNode, String, Double> preferredNodeEvaluator = null; private PipelineValidityCheckingNodeEvaluator pipelineValidityCheckingNodeEvaluator; /* Candidate Evaluation (if no other node evaluation is used) */ private IFoldSizeConfigurableRandomDatasetSplitter<ILabeledDataset<?>> searchSelectionDatasetSplitter; protected ISupervisedLearnerEvaluatorFactory<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> factoryForPipelineEvaluationInSearchPhase = null; protected ISupervisedLearnerEvaluatorFactory<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> factoryForPipelineEvaluationInSelectionPhase = null; private Collection<Component> components = new LinkedList<>(); /* The problem input for ML-Plan. */ private ILabeledDataset<?> dataset; protected AbstractMLPlanBuilder() { super(); this.withAlgorithmConfigFile(DEF_ALGORITHM_CONFIG); this.withRandomCompletionBasedBestFirstSearch(); this.withSeed(0); } /** * This ADDs a new preferred node evaluator; requires that the search will be a best-first search. * * It is possible to specify several preferred node evaluators, which will be ordered by the order in which they are specified. The latest given evaluator is the most preferred one. * * @param preferredNodeEvaluator * @return */ public B withPreferredNodeEvaluator(final IPathEvaluator<TFDNode, String, Double> preferredNodeEvaluator) { if (this.factoryPreparedWithData) { throw new IllegalStateException("The method prepareNodeEvaluatorInFactoryWithData has already been called. 
No changes to the preferred node evaluator possible anymore"); } /* first update the preferred node evaluator */ if (this.preferredNodeEvaluator == null) { this.preferredNodeEvaluator = preferredNodeEvaluator; } else { this.preferredNodeEvaluator = new AlternativeNodeEvaluator<>(preferredNodeEvaluator, this.preferredNodeEvaluator); } this.update(); return this.getSelf(); } @SuppressWarnings("unchecked") public B withSearchFactory(@SuppressWarnings("rawtypes") final IOptimalPathInORGraphSearchFactory searchFactory, @SuppressWarnings("rawtypes") final AlgorithmicProblemReduction transformer) { this.hascoFactory.setSearchFactory(searchFactory); this.hascoFactory.setSearchProblemTransformer(transformer); return this.getSelf(); } @SuppressWarnings("unchecked") public B withRandomCompletionBasedBestFirstSearch() { this.hascoFactory.setSearchFactory(new StandardBestFirstFactory<TFDNode, String, Double>()); this.update(); return this.getSelf(); } public Collection<Component> getComponents() throws IOException { return new ComponentLoader(this.searchSpaceFile).getComponents(); } public Map<Component, Map<Parameter, ParameterRefinementConfiguration>> getComponentParameterConfigurations() throws IOException { return new ComponentLoader(this.searchSpaceFile).getParamConfigs(); } /***********************************************************************************************************************************************************************************************************************/ /***********************************************************************************************************************************************************************************************************************/ /***********************************************************************************************************************************************************************************************************************/ /***********************************************************************************************************************************************************************************************************************/ /** * Loads the MLPlanClassifierConfig with default values and replaces all properties according to the properties defined in the given config file. * * @param algorithmConfigFile The file specifying the property values to replace the default configuration. * @return The MLPlanBuilder object. */ public B withAlgorithmConfigFile(final File algorithmConfigFile) { return this.withAlgorithmConfig((MLPlanClassifierConfig) ConfigFactory.create(MLPlanClassifierConfig.class).loadPropertiesFromFile(algorithmConfigFile)); } /** * Sets the algorithm configuration to the given config object. * * @param config The algorithm configuration. * @return The MLPlanBuilder object. */ public B withAlgorithmConfig(final MLPlanClassifierConfig config) { this.algorithmConfig = config; this.hascoFactory.withAlgorithmConfig(this.algorithmConfig); this.update(); return this.getSelf(); } /** * Set the data for which ML-Plan is supposed to find the best pipeline. * * @param dataset The dataset for which ML-Plan is to be run. * @return The builder object.
*/ public B withDataset(final ILabeledDataset<?> dataset) { if (!ReconstructionUtil.areInstructionsNonEmptyIfReconstructibilityClaimed(dataset)) { this.logger.warn("The dataset claims to be reconstructible, but it does not carry any instructions."); } this.dataset = dataset; return this.getSelf(); } public ILabeledDataset<?> getDataset() { return this.dataset; } /** * Specify the search space in which ML-Plan is required to work. * * @param searchSpaceConfig The file of the search space configuration. * @return The builder object. * @throws IOException Thrown if the given file does not exist. */ public B withSearchSpaceConfigFile(final File searchSpaceConfig) throws IOException { FileUtil.requireFileExists(searchSpaceConfig); this.searchSpaceFile = searchSpaceConfig; this.components.clear(); this.components.addAll(new ComponentLoader(this.searchSpaceFile).getComponents()); return this.getSelf(); } /** * Set the classifier factory that translates <code>CompositionInstance</code> objects to classifiers that can be evaluated. * * @param classifierFactory The classifier factory to be used to translate CompositionInstance objects to classifiers. * @return The builder object. */ public B withClassifierFactory(final ILearnerFactory<L> classifierFactory) { this.learnerFactory = classifierFactory; return this.getSelf(); } /** * Set the dataset splitter that is used for generating the holdout data portion that is put aside during search. * * @param datasetSplitter The dataset splitter to be used. * @return The builder object. */ public B withDatasetSplitterForSearchSelectionSplit(final IFoldSizeConfigurableRandomDatasetSplitter<ILabeledDataset<?>> datasetSplitter) { this.searchSelectionDatasetSplitter = datasetSplitter; return this.getSelf(); } public B withRequestedInterface(final String requestedInterface) { this.requestedHASCOInterface = requestedInterface; return this.getSelf(); } /** * @param timeout The timeout for ML-Plan to search for the best classifier. * @return The builder object. */ public B withTimeOut(final Timeout timeout) { this.algorithmConfig.setProperty(IOwnerBasedAlgorithmConfig.K_TIMEOUT, timeout.milliseconds() + ""); this.update(); return this.getSelf(); } /** * @return The timeout for ML-Plan to search for the best classifier. */ public Timeout getTimeOut() { return new Timeout(this.algorithmConfig.timeout(), TimeUnit.MILLISECONDS); } /** * @param timeout The timeout for a single node evaluation. * @return The builder object. */ public B withNodeEvaluationTimeOut(final Timeout timeout) { this.algorithmConfig.setProperty(HASCOWithRandomCompletionsConfig.K_RANDOM_COMPLETIONS_TIMEOUT_NODE, timeout.milliseconds() + ""); this.update(); return this.getSelf(); } /** * @return The timeout for a single node evaluation. */ public Timeout getNodeEvaluationTimeOut() { return new Timeout(this.algorithmConfig.timeoutForNodeEvaluation(), TimeUnit.MILLISECONDS); } /** * @param timeout The timeout for a single candidate evaluation. * @return The builder object. */ public B withCandidateEvaluationTimeOut(final Timeout timeout) { this.algorithmConfig.setProperty(HASCOWithRandomCompletionsConfig.K_RANDOM_COMPLETIONS_TIMEOUT_PATH, timeout.milliseconds() + ""); this.update(); return this.getSelf(); } /** * @return The timeout for a single candidate evaluation.
*/ public Timeout getCandidateEvaluationTimeOut() { return new Timeout(this.algorithmConfig.timeoutForCandidateEvaluation(), TimeUnit.MILLISECONDS); } public MonteCarloCrossValidationEvaluatorFactory withMCCVBasedCandidateEvaluationInSearchPhase() { this.factoryForPipelineEvaluationInSearchPhase = new MonteCarloCrossValidationEvaluatorFactory(); return ((MonteCarloCrossValidationEvaluatorFactory) this.factoryForPipelineEvaluationInSearchPhase).withNumMCIterations(DEFAULT_SEARCH_NUM_MC_ITERATIONS).withTrainFoldSize(DEFAULT_SEARCH_TRAIN_FOLD_SIZE) .withMeasure(DEFAULT_PERFORMANCE_MEASURE); } public MonteCarloCrossValidationEvaluatorFactory withMCCVBasedCandidateEvaluationInSelectionPhase() { this.factoryForPipelineEvaluationInSelectionPhase = new MonteCarloCrossValidationEvaluatorFactory(); return ((MonteCarloCrossValidationEvaluatorFactory) this.factoryForPipelineEvaluationInSelectionPhase).withNumMCIterations(DEFAULT_SELECTION_NUM_MC_ITERATIONS).withTrainFoldSize(DEFAULT_SELECTION_TRAIN_FOLD_SIZE) .withMeasure(DEFAULT_PERFORMANCE_MEASURE); } @Override public ISupervisedLearnerEvaluatorFactory<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> getLearnerEvaluationFactoryForSearchPhase() { return this.factoryForPipelineEvaluationInSearchPhase; } @Override public ISupervisedLearnerEvaluatorFactory<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> getLearnerEvaluationFactoryForSelectionPhase() { return this.factoryForPipelineEvaluationInSelectionPhase; } /** * Sets the evaluator factory for the search phase. * * @param evaluatorFactory The evaluator factory for the search phase. */ public void withSearchPhaseEvaluatorFactory(final ISupervisedLearnerEvaluatorFactory<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> evaluatorFactory) { this.factoryForPipelineEvaluationInSearchPhase = evaluatorFactory; } /** * @return The factory for the classifier evaluator of the search phase. */ protected ISupervisedLearnerEvaluatorFactory<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> getSearchEvaluatorFactory() { return this.factoryForPipelineEvaluationInSearchPhase; } /** * Sets the evaluator factory for the selection phase. * * @param evaluatorFactory The evaluator factory for the selection phase. * @return The builder object. */ public B withSelectionPhaseEvaluatorFactory(final ISupervisedLearnerEvaluatorFactory<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> evaluatorFactory) { this.factoryForPipelineEvaluationInSelectionPhase = evaluatorFactory; return this.getSelf(); } /** * Sets the number of CPUs that may be used by ML-Plan. * * @param numCpus The number of CPUs to use. * @return The builder object. */ public B withNumCpus(final int numCpus) { this.algorithmConfig.setProperty(IOwnerBasedAlgorithmConfig.K_CPUS, numCpus + ""); this.update(); return this.getSelf(); } public B withSeed(final long seed) { this.algorithmConfig.setProperty(IOwnerBasedRandomConfig.K_SEED, seed + ""); this.update(); return this.getSelf(); } /** * @return The factory for the classifier evaluator of the selection phase. */ protected ISupervisedLearnerEvaluatorFactory<ILabeledInstance, ILabeledDataset<?
extends ILabeledInstance>> getSelectionEvaluatorFactory() { return this.factoryForPipelineEvaluationInSelectionPhase; } @Override public HASCOViaFDFactory getHASCOFactory() { return this.hascoFactory; } @Override public ILearnerFactory<L> getLearnerFactory() { return this.learnerFactory; } @Override public String getLoggerName() { return this.loggerName; } @Override public void setLoggerName(final String name) { this.logger = LoggerFactory.getLogger(name); this.loggerName = name; } @Override public String getRequestedInterface() { return this.requestedHASCOInterface; } @Override public IFoldSizeConfigurableRandomDatasetSplitter<ILabeledDataset<? extends ILabeledInstance>> getSearchSelectionDatasetSplitter() { return this.searchSelectionDatasetSplitter; } @Override public File getSearchSpaceConfigFile() { return this.searchSpaceFile; } @Override public MLPlanClassifierConfig getAlgorithmConfig() { return this.algorithmConfig; } public B withPipelineValidityCheckingNodeEvaluator(final PipelineValidityCheckingNodeEvaluator ne) { this.pipelineValidityCheckingNodeEvaluator = ne; return this.getSelf(); } public void prepareNodeEvaluatorInFactoryWithData() { if (!(this.hascoFactory.getSearchFactory() instanceof BestFirstFactory)) { return; } if (this.factoryPreparedWithData) { throw new IllegalStateException("Factory has already been prepared with data. This can only be done once!"); } this.factoryPreparedWithData = true; /* nothing to do if there are no preferred node evaluators */ if (this.pipelineValidityCheckingNodeEvaluator == null && this.preferredNodeEvaluator == null) { return; } /* now determine the real node evaluator to be used. A semantic node evaluator has highest priority */ IPathEvaluator<TFDNode, String, Double> actualNodeEvaluator; if (this.pipelineValidityCheckingNodeEvaluator != null) { this.pipelineValidityCheckingNodeEvaluator.setComponents(this.components); this.pipelineValidityCheckingNodeEvaluator.setData(this.dataset); if (this.preferredNodeEvaluator != null) { actualNodeEvaluator = new AlternativeNodeEvaluator<>(this.pipelineValidityCheckingNodeEvaluator, this.preferredNodeEvaluator); } else { actualNodeEvaluator = this.pipelineValidityCheckingNodeEvaluator; } } else { actualNodeEvaluator = this.preferredNodeEvaluator; } /* update the preferred node evaluator in the HascoFactory */ this.preferredNodeEvaluator = actualNodeEvaluator; this.update(); } @SuppressWarnings("unchecked") private void update() { this.hascoFactory.setSearchProblemTransformer(new GraphSearchProblemInputToGraphSearchWithSubpathEvaluationInputTransformerViaRDFS<TFDNode, String, Double>(this.preferredNodeEvaluator, this.priorizingPredicate, this.algorithmConfig.randomSeed(), this.algorithmConfig.numberOfRandomCompletions(), this.algorithmConfig.timeoutForCandidateEvaluation(), this.algorithmConfig.timeoutForNodeEvaluation())); this.hascoFactory.withAlgorithmConfig(this.getAlgorithmConfig()); } public B withPortionOfDataReservedForSelection(final double value) { this.algorithmConfig.setProperty(MLPlanClassifierConfig.SELECTION_PORTION, value + ""); return this.getSelf(); } @Override public double getPortionOfDataReservedForSelectionPhase() { return this.algorithmConfig.dataPortionForSelection(); } /** * Builds an ML-Plan object for the given dataset as input. * * @param dataset The dataset for which an ML-Plan object is to be built. * @return The ML-Plan object configured with this builder. 
*/ public MLPlan<L> build(final ILabeledDataset<?> dataset) { return this.withDataset(dataset).build(); } public void checkPreconditionsForInitialization() { Objects.requireNonNull(this.dataset, "A dataset needs to be provided as input to ML-Plan"); Objects.requireNonNull(this.searchSelectionDatasetSplitter, "Dataset splitter for search phase must be set!"); Objects.requireNonNull(this.requestedHASCOInterface, "No requested HASCO interface defined!"); } /** * Builds an ML-Plan object with the dataset provided earlier to this builder. * * @return The ML-Plan object configured with this builder. */ public MLPlan<L> build() { this.checkPreconditionsForInitialization(); this.prepareNodeEvaluatorInFactoryWithData(); // inform node evaluator about data and create the MLPlan object return new MLPlan<>(this, this.dataset); } }
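The fluent API above composes as follows. This is a minimal sketch, not part of the original sources: it assumes the MLPlanSimpleBuilder shown further below, a dataset loaded elsewhere, and the bundled search space resources; all timeout and resource values are illustrative.

package ai.libs.mlplan.core; // sketch assumed to live next to the builder

import java.util.concurrent.TimeUnit;
import org.api4.java.ai.ml.classification.IClassifier;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;
import org.api4.java.algorithm.Timeout;

public class MLPlanBuilderUsageSketch {
    public static IClassifier run(final ILabeledDataset<?> dataset) throws Exception {
        MLPlanSimpleBuilder builder = new MLPlanSimpleBuilder();
        builder.withTimeOut(new Timeout(300, TimeUnit.SECONDS)) // overall optimization budget
                .withNodeEvaluationTimeOut(new Timeout(60, TimeUnit.SECONDS)) // budget per node evaluation
                .withCandidateEvaluationTimeOut(new Timeout(30, TimeUnit.SECONDS)) // budget per candidate pipeline
                .withNumCpus(4)
                .withSeed(42);
        MLPlan<IClassifier> mlplan = builder.build(dataset); // also informs the node evaluators about the data
        return mlplan.call(); // blocks until search and selection have finished
    }
}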
0
java-sources/ai/libs/mlplan/0.2.1/ai/libs/mlplan
java-sources/ai/libs/mlplan/0.2.1/ai/libs/mlplan/core/ILearnerFactory.java
package ai.libs.mlplan.core; import org.api4.java.ai.ml.core.learner.ISupervisedLearner; import ai.libs.hasco.optimizingfactory.BaseFactory; public interface ILearnerFactory<L extends ISupervisedLearner<?, ?>> extends BaseFactory<L> { }
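Because the interface merely inherits the single creation method of BaseFactory, it can be implemented with a lambda; the MLPlanSimpleBuilder further below does exactly that. A sketch, with imports as in MLPlanSimpleBuilder:

// Maps every ComponentInstance to a constant baseline learner; a real factory would
// translate the component description (parameters, nested components) into a configured learner.
ILearnerFactory<IClassifier> factory = ci -> new MajorityClassifier();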
0
java-sources/ai/libs/mlplan/0.2.1/ai/libs/mlplan
java-sources/ai/libs/mlplan/0.2.1/ai/libs/mlplan/core/IMLPlanBuilder.java
package ai.libs.mlplan.core; import java.io.File; import org.api4.java.ai.ml.core.dataset.splitter.IFoldSizeConfigurableRandomDatasetSplitter; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance; import org.api4.java.ai.ml.core.learner.ISupervisedLearner; import ai.libs.hasco.core.HASCOFactory; import ai.libs.jaicore.ml.core.evaluation.evaluator.factory.ISupervisedLearnerEvaluatorFactory; import ai.libs.jaicore.ml.core.evaluation.evaluator.factory.LearnerEvaluatorConstructionFailedException; import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode; import ai.libs.jaicore.search.probleminputs.GraphSearchWithPathEvaluationsInput; import ai.libs.mlplan.multiclass.MLPlanClassifierConfig; /** * The IMLPlanBuilder provides the general interface of an ML-Plan builder independent * of the problem domain or specific library that is used for the configuration of machine * learning pipelines. * * @author mwever * @author fmohr * */ public interface IMLPlanBuilder<L extends ISupervisedLearner<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>>, B extends IMLPlanBuilder<L, B>> { /** * This is the splitter that splits the given input data into data for the search phase and for the selection phase * @return */ public IFoldSizeConfigurableRandomDatasetSplitter<ILabeledDataset<? extends ILabeledInstance>> getSearchSelectionDatasetSplitter(); /** * This is the factory that will be used to create the pipeline evaluators for evaluation during search time * @return * @throws LearnerEvaluatorConstructionFailedException */ public ISupervisedLearnerEvaluatorFactory<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> getLearnerEvaluationFactoryForSearchPhase(); /** * This is the factory that will be used to create the pipeline evaluators for evaluation during selection time * @return * @throws LearnerEvaluatorConstructionFailedException */ public ISupervisedLearnerEvaluatorFactory<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> getLearnerEvaluationFactoryForSelectionPhase(); public String getRequestedInterface(); public File getSearchSpaceConfigFile(); public ILearnerFactory<L> getLearnerFactory(); public HASCOFactory<GraphSearchWithPathEvaluationsInput<TFDNode, String, Double>, TFDNode, String, Double> getHASCOFactory(); public MLPlanClassifierConfig getAlgorithmConfig(); public double getPortionOfDataReservedForSelectionPhase(); public B getSelf(); }
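The type parameter B realizes a self-referential generic builder: getSelf() closes the recursion so that all inherited fluent methods return the concrete builder type. A hypothetical minimal subclass (MLPlanSimpleBuilder below follows the same pattern):

public class MyMLPlanBuilder extends AbstractMLPlanBuilder<IClassifier, MyMLPlanBuilder> {
    @Override
    public MyMLPlanBuilder getSelf() {
        return this; // fluent calls such as withSeed(...) now chain with type MyMLPlanBuilder
    }
}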
0
java-sources/ai/libs/mlplan/0.2.1/ai/libs/mlplan
java-sources/ai/libs/mlplan/0.2.1/ai/libs/mlplan/core/MLPlan.java
package ai.libs.mlplan.core; import java.io.IOException; import java.util.List; import java.util.Objects; import java.util.Random; import java.util.concurrent.TimeUnit; import org.api4.java.ai.graphsearch.problem.IPathSearchInput; import org.api4.java.ai.ml.core.IDataConfigurable; import org.api4.java.ai.ml.core.dataset.splitter.IFoldSizeConfigurableRandomDatasetSplitter; import org.api4.java.ai.ml.core.dataset.splitter.SplitFailedException; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance; import org.api4.java.ai.ml.core.learner.ISupervisedLearner; import org.api4.java.algorithm.IAlgorithm; import org.api4.java.algorithm.Timeout; import org.api4.java.algorithm.events.IAlgorithmEvent; import org.api4.java.algorithm.exceptions.AlgorithmException; import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException; import org.api4.java.algorithm.exceptions.AlgorithmTimeoutedException; import org.api4.java.common.control.ILoggingCustomizable; import org.api4.java.common.control.IRandomConfigurable; import org.api4.java.common.event.IEvent; import org.api4.java.common.reconstruction.IReconstructible; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.eventbus.Subscribe; import ai.libs.hasco.core.HASCO; import ai.libs.hasco.core.HASCOFactory; import ai.libs.hasco.core.HASCOSolutionCandidate; import ai.libs.hasco.events.HASCOSolutionEvent; import ai.libs.hasco.events.TwoPhaseHASCOPhaseSwitchEvent; import ai.libs.hasco.exceptions.ComponentInstantiationFailedException; import ai.libs.hasco.model.ComponentInstance; import ai.libs.hasco.optimizingfactory.OptimizingFactory; import ai.libs.hasco.optimizingfactory.OptimizingFactoryProblem; import ai.libs.hasco.variants.forwarddecomposition.twophase.TwoPhaseHASCO; import ai.libs.hasco.variants.forwarddecomposition.twophase.TwoPhaseHASCOConfig; import ai.libs.hasco.variants.forwarddecomposition.twophase.TwoPhaseHASCOFactory; import ai.libs.hasco.variants.forwarddecomposition.twophase.TwoPhaseSoftwareConfigurationProblem; import ai.libs.jaicore.basic.MathExt; import ai.libs.jaicore.basic.algorithm.AAlgorithm; import ai.libs.jaicore.basic.algorithm.AlgorithmFinishedEvent; import ai.libs.jaicore.basic.algorithm.AlgorithmInitializedEvent; import ai.libs.jaicore.basic.reconstruction.ReconstructionUtil; import ai.libs.jaicore.ml.core.dataset.DatasetUtil; import ai.libs.jaicore.ml.core.evaluation.evaluator.factory.ISupervisedLearnerEvaluatorFactory; import ai.libs.jaicore.ml.core.evaluation.evaluator.factory.LearnerEvaluatorConstructionFailedException; import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode; import ai.libs.jaicore.search.probleminputs.GraphSearchInput; import ai.libs.jaicore.search.probleminputs.GraphSearchWithPathEvaluationsInput; import ai.libs.mlplan.core.events.ClassifierFoundEvent; import ai.libs.mlplan.core.events.MLPlanPhaseSwitchedEvent; import ai.libs.mlplan.multiclass.MLPlanClassifierConfig; public class MLPlan<L extends ISupervisedLearner<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>>> extends AAlgorithm<ILabeledDataset<?>, L> implements ILoggingCustomizable { /** Logger for controlled output. 
*/ private Logger logger = LoggerFactory.getLogger(MLPlan.class); private String loggerName; private L selectedClassifier; private double internalValidationErrorOfSelectedClassifier; private ComponentInstance componentInstanceOfSelectedClassifier; private final IMLPlanBuilder<L, ?> builder; private TwoPhaseHASCOFactory<GraphSearchWithPathEvaluationsInput<TFDNode, String, Double>, TFDNode, String> twoPhaseHASCOFactory; private OptimizingFactory<TwoPhaseSoftwareConfigurationProblem, L, HASCOSolutionCandidate<Double>, Double> optimizingFactory; private boolean buildSelectedClasifierOnGivenData = true; private final long seed; private long timestampAlgorithmStart; private boolean maintainReconstructibility = true; protected MLPlan(final IMLPlanBuilder<L, ?> builder, final ILabeledDataset<?> data) { // ML-Plan has a protected constructor, because it should only be constructed using a builder super(builder.getAlgorithmConfig(), data); /* sanity checks */ if (builder.getSearchSpaceConfigFile() == null || !builder.getSearchSpaceConfigFile().exists()) { throw new IllegalArgumentException("The search space configuration file must be set in MLPlanBuilder, and it must be set to a file that exists!"); } if (builder.getLearnerFactory() == null) { throw new IllegalArgumentException("ClassifierFactory must be set in MLPlanBuilder!"); } if (builder.getRequestedInterface() == null || builder.getRequestedInterface().isEmpty()) { throw new IllegalArgumentException("No requested HASCO interface defined!"); } /* store builder and data for main algorithm */ this.builder = builder; this.setTimeout(new Timeout(builder.getAlgorithmConfig().timeout(), TimeUnit.MILLISECONDS)); Objects.requireNonNull(this.getInput()); if (this.getInput().isEmpty()) { throw new IllegalArgumentException("Cannot run ML-Plan on empty dataset."); } this.seed = this.builder.getAlgorithmConfig().seed(); if (this.getInput() instanceof IReconstructible) { this.maintainReconstructibility = ReconstructionUtil.areInstructionsNonEmptyIfReconstructibilityClaimed(this.getInput()); if (!this.maintainReconstructibility) { this.logger.warn("The dataset claims to be reconstructible, but it does not carry any instructions. ML-Plan will not add reconstruction instructions."); } } else { this.maintainReconstructibility = false; } } @SuppressWarnings("unchecked") @Override public IAlgorithmEvent nextWithException() throws AlgorithmException, InterruptedException, AlgorithmExecutionCanceledException, AlgorithmTimeoutedException { switch (this.getState()) { case CREATED: this.setTimeoutPrecautionOffset(Math.max(5000, this.getTimeoutPrecautionOffset())); // minimum 5 seconds precaution offset for timeouts this.logger.info("Starting an ML-Plan instance.
Timeout precaution is {}ms", this.getTimeoutPrecautionOffset()); this.timestampAlgorithmStart = System.currentTimeMillis(); this.setDeadline(); // algorithm execution starts NOW, set deadline /* check number of CPUs assigned */ if (this.getConfig().cpus() < 1) { throw new IllegalStateException("Cannot generate search where number of CPUs is " + this.getConfig().cpus()); } /* set up exact splits */ final double dataPortionUsedForSelection = this.getConfig().dataPortionForSelection(); ILabeledDataset<?> dataShownToSearch; ILabeledDataset<?> dataShownToSelection; if (dataPortionUsedForSelection > 0) { try { int seed = this.getConfig().randomSeed(); IFoldSizeConfigurableRandomDatasetSplitter<ILabeledDataset<?>> splitter = this.builder.getSearchSelectionDatasetSplitter(); if (splitter == null) { throw new IllegalArgumentException("The builder does not specify a dataset splitter for the separation between search and selection phase data."); } this.logger.debug("Splitting given {} data points into search data ({}%) and selection data ({}%) with splitter {}.", this.getInput().size(), MathExt.round((1 - dataPortionUsedForSelection) * 100, 2), MathExt.round(dataPortionUsedForSelection * 100, 2), splitter.getClass().getName()); if (splitter instanceof ILoggingCustomizable) { ((ILoggingCustomizable) splitter).setLoggerName(this.getLoggerName() + ".searchselectsplitter"); } List<ILabeledDataset<?>> split = splitter.split(this.getInput(), new Random(seed), dataPortionUsedForSelection); final int expectedSearchSize = (int) Math.round(this.getInput().size() * (1 - dataPortionUsedForSelection)); // attention; this is a bit tricky (data portion for selection is in 0) final int expectedSelectionSize = this.getInput().size() - expectedSearchSize; if (Math.abs(expectedSearchSize - split.get(1).size()) > 1 || Math.abs(expectedSelectionSize - split.get(0).size()) > 1) { throw new IllegalStateException("Invalid split produced by " + splitter.getClass().getName() + "! Split sizes are " + split.get(1).size() + "/" + split.get(0).size() + " but expected sizes were " + expectedSearchSize + "/" + expectedSelectionSize); } dataShownToSearch = split.get(1); // attention; this is a bit tricky (data portion for selection is in 0) dataShownToSelection = this.getInput(); this.logger.debug("Search/Selection split completed. Using {} data points in search and {} in selection.", dataShownToSearch.size(), dataShownToSelection.size()); } catch (SplitFailedException e) { throw new AlgorithmException("Error in ML-Plan execution.", e); } } else { dataShownToSearch = this.getInput(); dataShownToSelection = null; this.logger.debug("Selection phase de-activated. Not splitting the data and giving everything to the search."); } if (dataShownToSearch.isEmpty()) { throw new IllegalStateException("Cannot search on no data."); } if (dataShownToSelection != null && dataShownToSelection.size() < dataShownToSearch.size()) { throw new IllegalStateException("The search data (" + dataShownToSearch.size() + " data points) are bigger than the selection data (" + dataShownToSelection.size() + " data points)!"); } /* check that class proportions are maintained */ if (this.logger.isDebugEnabled()) { this.logger.debug("Class distribution is {}. 
Original class distribution was {}", DatasetUtil.getLabelCounts(dataShownToSearch), DatasetUtil.getLabelCounts(this.getInput())); } /* check that reconstructibility is preserved */ if (this.maintainReconstructibility && ((IReconstructible) dataShownToSearch).getConstructionPlan().getInstructions().isEmpty()) { throw new IllegalStateException("Reconstructibility instructions have been lost in search/selection-split!"); } /* dynamically compute blow-ups */ if (Double.isNaN(this.getConfig().expectedBlowupInSelection())) { double blowUpInSelectionPhase = 1; this.getConfig().setProperty(MLPlanClassifierConfig.K_BLOWUP_SELECTION, String.valueOf(blowUpInSelectionPhase)); this.logger.info("No expected blow-up for selection phase has been defined. Automatically configuring {}", blowUpInSelectionPhase); } if (!this.buildSelectedClasifierOnGivenData) { this.getConfig().setProperty(MLPlanClassifierConfig.K_BLOWUP_POSTPROCESS, String.valueOf(0)); this.logger.info("Selected classifier won't be built, so no blow-up needs to be calculated."); } else if (Double.isNaN(this.getConfig().expectedBlowupInPostprocessing())) { double blowUpInPostprocessing = 1; this.getConfig().setProperty(MLPlanClassifierConfig.K_BLOWUP_POSTPROCESS, String.valueOf(blowUpInPostprocessing)); this.logger.info("No expected blow-up for postprocessing phase has been defined. Automatically configuring {}", blowUpInPostprocessing); } /* set up the pipeline evaluators */ this.logger.debug("Setting up the pipeline evaluators."); ISupervisedLearnerEvaluatorFactory<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> evaluatorFactoryForSearch = this.builder.getLearnerEvaluationFactoryForSearchPhase(); if (evaluatorFactoryForSearch instanceof IRandomConfigurable) { ((IRandomConfigurable) evaluatorFactoryForSearch).setRandom(new Random(this.seed)); } if (evaluatorFactoryForSearch instanceof IDataConfigurable) { ((IDataConfigurable) evaluatorFactoryForSearch).setData(dataShownToSearch); } ISupervisedLearnerEvaluatorFactory<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> evaluatorFactoryForSelection = this.builder.getLearnerEvaluationFactoryForSelectionPhase(); if (evaluatorFactoryForSelection instanceof IRandomConfigurable) { ((IRandomConfigurable) evaluatorFactoryForSelection).setRandom(new Random(this.seed)); } if (evaluatorFactoryForSelection instanceof IDataConfigurable && dataShownToSelection != null) { ((IDataConfigurable) evaluatorFactoryForSelection).setData(dataShownToSelection); } PipelineEvaluator classifierEvaluatorForSearch; PipelineEvaluator classifierEvaluatorForSelection; try { classifierEvaluatorForSearch = new PipelineEvaluator(this.builder.getLearnerFactory(), evaluatorFactoryForSearch.getLearnerEvaluator(), this.getConfig().timeoutForCandidateEvaluation()); classifierEvaluatorForSelection = dataShownToSelection != null ?
new PipelineEvaluator(this.builder.getLearnerFactory(), evaluatorFactoryForSelection.getLearnerEvaluator(), this.getConfig().timeoutForCandidateEvaluation()) : null; } catch (LearnerEvaluatorConstructionFailedException e2) { throw new AlgorithmException("Could not create the pipeline evaluator", e2); } classifierEvaluatorForSearch.registerListener(this); // events will be forwarded if (classifierEvaluatorForSelection != null) { classifierEvaluatorForSelection.registerListener(this); // events will be forwarded } /* communicate the parameters with which ML-Plan will run */ if (this.logger.isInfoEnabled()) { this.logger.info( "Starting ML-Plan with the following setup:\n\tDataset: {}\n\tCPUs: {}\n\tTimeout: {}s\n\tTimeout for single candidate evaluation: {}s\n\tTimeout for node evaluation: {}s\n\tRandom Completions per node evaluation: {}\n\tPortion of data for selection phase: {}%\n\tData points used during search: {}\n\tData points used during selection: {}\n\tPipeline evaluation during search: {}\n\tPipeline evaluation during selection: {}\n\tBlow-ups are {} for selection phase and {} for post-processing phase.", this.getInput().getRelationName(), this.getConfig().cpus(), this.getTimeout().seconds(), this.getConfig().timeoutForCandidateEvaluation() / 1000, this.getConfig().timeoutForNodeEvaluation() / 1000, this.getConfig().numberOfRandomCompletions(), MathExt.round(this.getConfig().dataPortionForSelection() * 100, 2), dataShownToSearch.size(), dataShownToSelection != null ? dataShownToSelection.size() : 0, classifierEvaluatorForSearch.getBenchmark(), classifierEvaluatorForSelection != null ? classifierEvaluatorForSelection.getBenchmark() : null, this.getConfig().expectedBlowupInSelection(), this.getConfig().expectedBlowupInPostprocessing()); } /* create 2-phase software configuration problem */ this.logger.debug("Creating 2-phase software configuration problem."); TwoPhaseSoftwareConfigurationProblem problem = null; try { problem = new TwoPhaseSoftwareConfigurationProblem(this.builder.getSearchSpaceConfigFile(), this.builder.getRequestedInterface(), classifierEvaluatorForSearch, classifierEvaluatorForSelection); } catch (IOException e1) { throw new AlgorithmException("Could not activate ML-Plan!", e1); } /* create 2-phase HASCO */ this.logger.info("Creating the twoPhaseHASCOFactory."); OptimizingFactoryProblem<TwoPhaseSoftwareConfigurationProblem, L, Double> optimizingFactoryProblem = new OptimizingFactoryProblem<>(this.builder.getLearnerFactory(), problem); HASCOFactory<GraphSearchWithPathEvaluationsInput<TFDNode, String, Double>, TFDNode, String, Double> hascoFactory = this.builder.getHASCOFactory(); this.twoPhaseHASCOFactory = new TwoPhaseHASCOFactory<>(hascoFactory); this.twoPhaseHASCOFactory.setConfig(this.getConfig().copy(TwoPhaseHASCOConfig.class)); // instantiate 2-Phase-HASCO with a config COPY so that config changes in 2-Phase-HASCO do not impact the ML-Plan configuration this.optimizingFactory = new OptimizingFactory<>(optimizingFactoryProblem, this.twoPhaseHASCOFactory); this.logger.info("Setting logger of {} to {}.optimizingfactory", this.optimizingFactory.getClass().getName(), this.loggerName); this.optimizingFactory.setLoggerName(this.loggerName + ".optimizingfactory"); this.optimizingFactory.registerListener(new Object() { @Subscribe public void receiveEventFromFactory(final IEvent event) { if (event instanceof AlgorithmInitializedEvent || event instanceof AlgorithmFinishedEvent) { return; } if (event instanceof TwoPhaseHASCOPhaseSwitchEvent) { MLPlan.this.post(new
MLPlanPhaseSwitchedEvent(MLPlan.this)); } else if (event instanceof HASCOSolutionEvent) { @SuppressWarnings("unchecked") HASCOSolutionCandidate<Double> solution = ((HASCOSolutionEvent<Double>) event).getSolutionCandidate(); try { MLPlan.this.logger.info("Received new solution {} with score {} and evaluation time {}ms", solution.getComponentInstance().getNestedComponentDescription(), solution.getScore(), solution.getTimeToEvaluateCandidate()); } catch (Exception e) { MLPlan.this.logger.warn("Could not print log due to exception while preparing the log message.", e); } if (dataPortionUsedForSelection == 0.0 && solution.getScore() < MLPlan.this.internalValidationErrorOfSelectedClassifier) { try { MLPlan.this.selectedClassifier = MLPlan.this.builder.getLearnerFactory().getComponentInstantiation(solution.getComponentInstance()); MLPlan.this.internalValidationErrorOfSelectedClassifier = solution.getScore(); MLPlan.this.componentInstanceOfSelectedClassifier = solution.getComponentInstance(); } catch (ComponentInstantiationFailedException e) { MLPlan.this.logger.error("Could not update selectedClassifier with newly best seen solution due to issues building the classifier from its ComponentInstance description.", e); } } try { MLPlan.this.post(new ClassifierFoundEvent(MLPlan.this, solution.getComponentInstance(), MLPlan.this.builder.getLearnerFactory().getComponentInstantiation(solution.getComponentInstance()), solution.getScore())); } catch (ComponentInstantiationFailedException e) { MLPlan.this.logger.error("An issue occurred while preparing the description for the post of a ClassifierFoundEvent", e); } } else { MLPlan.this.post(event); } } }); this.optimizingFactory.setTimeout(this.getRemainingTimeToDeadline()); this.logger.info("Initializing the optimization factory."); this.optimizingFactory.init(); AlgorithmInitializedEvent event = this.activate(); this.logger.info("Started and activated ML-Plan."); return event; case ACTIVE: /* train the classifier returned by the optimizing factory */ long startOptimizationTime = System.currentTimeMillis(); try { this.selectedClassifier = this.optimizingFactory.call(); this.logger.info("2-Phase-HASCO has chosen classifier {}, which will now be built on the entire data given, i.e. {} data points.", this.selectedClassifier, this.getInput().size()); } catch (AlgorithmException | InterruptedException | AlgorithmExecutionCanceledException | AlgorithmTimeoutedException e) { this.terminate(); // send the termination event throw e; } this.internalValidationErrorOfSelectedClassifier = this.optimizingFactory.getPerformanceOfObject(); this.componentInstanceOfSelectedClassifier = this.optimizingFactory.getComponentInstanceOfObject(); if (this.buildSelectedClasifierOnGivenData) { long startBuildTime = System.currentTimeMillis(); try { this.selectedClassifier.fit(this.getInput()); } catch (Exception e) { throw new AlgorithmException("Training the classifier failed!", e); } long endBuildTime = System.currentTimeMillis(); this.logger.info( "Selected model has been built on entire dataset. Build time of chosen model was {}ms. Total construction time was {}ms ({}ms of that on preparation and {}ms on essential optimization). The chosen classifier is: {}", endBuildTime - startBuildTime, endBuildTime - this.timestampAlgorithmStart, startOptimizationTime - this.timestampAlgorithmStart, endBuildTime - startOptimizationTime, this.selectedClassifier); } else { this.logger.info("Selected model has not been built, since model building has been disabled. 
Total construction time was {}ms.", System.currentTimeMillis() - startOptimizationTime); } return this.terminate(); default: throw new IllegalStateException("Cannot do anything in state " + this.getState()); } } @Override public L call() throws AlgorithmException, InterruptedException, AlgorithmExecutionCanceledException, AlgorithmTimeoutedException { while (this.hasNext()) { this.nextWithException(); } return this.selectedClassifier; } @Override public void setLoggerName(final String name) { this.loggerName = name; this.logger.info("Switching logger name to {}", name); this.logger = LoggerFactory.getLogger(name); this.logger.info("Activated ML-Plan logger {}. Now setting logger of the optimizing factory to {}.optimizingfactory", name, name); if (this.optimizingFactory != null) { this.logger.info("Setting logger of {} to {}.optimizingfactory", this.optimizingFactory.getClass().getName(), this.loggerName); this.optimizingFactory.setLoggerName(this.loggerName + ".optimizingfactory"); } else { this.logger.debug("Optimizingfactory has not been set yet, so not customizing its logger."); } this.logger.info("Switched ML-Plan logger to {}", name); } public void setPortionOfDataForPhase2(final double portion) { this.getConfig().setProperty(MLPlanClassifierConfig.SELECTION_PORTION, String.valueOf(portion)); } @Override public String getLoggerName() { return this.loggerName; } @Override public MLPlanClassifierConfig getConfig() { return (MLPlanClassifierConfig) super.getConfig(); } public void setRandomSeed(final int seed) { this.getConfig().setProperty(MLPlanClassifierConfig.K_RANDOM_SEED, String.valueOf(seed)); } public L getSelectedClassifier() { return this.selectedClassifier; } public ComponentInstance getComponentInstanceOfSelectedClassifier() { return this.componentInstanceOfSelectedClassifier; } @SuppressWarnings("unchecked") public IPathSearchInput<TFDNode, String> getSearchProblemInputGenerator() { return ((TwoPhaseHASCO<? extends GraphSearchInput<TFDNode, String>, TFDNode, String>) this.optimizingFactory.getOptimizer()).getGraphSearchInput(); } public double getInternalValidationErrorOfSelectedClassifier() { return this.internalValidationErrorOfSelectedClassifier; } @Override public synchronized void cancel() { this.logger.info("Received cancel. First canceling optimizer, then invoking general shutdown."); this.optimizingFactory.cancel(); this.logger.debug("Now canceling main ML-Plan routine"); super.cancel(); assert this.isCanceled() : "Canceled-flag is not positive at the end of the cancel routine!"; this.logger.info("Completed cancellation of ML-Plan. Cancel status is {}", this.isCanceled()); } public OptimizingFactory<TwoPhaseSoftwareConfigurationProblem, L, HASCOSolutionCandidate<Double>, Double> getOptimizingFactory() { return this.optimizingFactory; } public IAlgorithm<?, ?> getSearch() { HASCO<?, ?, ?, ?> hasco = ((TwoPhaseHASCO<?, ?, ?>) this.optimizingFactory.getOptimizer()).getHasco(); return hasco.getSearch(); } @Subscribe public void receiveEvent(final IEvent e) { this.post(e); } public TwoPhaseHASCOFactory<GraphSearchWithPathEvaluationsInput<TFDNode, String, Double>, TFDNode, String> getTwoPhaseHASCOFactory() { return this.twoPhaseHASCOFactory; } public boolean isBuildSelectedClasifierOnGivenData() { return this.buildSelectedClasifierOnGivenData; } public void setBuildSelectedClasifierOnGivenData(final boolean buildSelectedClasifierOnGivenData) { this.buildSelectedClasifierOnGivenData = buildSelectedClasifierOnGivenData; } }
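A sketch of observing a running instance. It assumes that registerListener is inherited from AAlgorithm (the class posts its events via post(...)), that 'dataset' is provided by the caller, and the usual Guava/ML-Plan imports:

MLPlan<IClassifier> mlplan = new MLPlanSimpleBuilder().withDataset(dataset).build();
mlplan.registerListener(new Object() {
    @Subscribe
    public void onIncumbent(final ClassifierFoundEvent e) { // re-posted for every evaluated solution
        System.out.println("Found candidate with in-sample error " + e.getInSampleError());
    }
});
IClassifier best = mlplan.call();
double err = mlplan.getInternalValidationErrorOfSelectedClassifier();
ComponentInstance winner = mlplan.getComponentInstanceOfSelectedClassifier(); // symbolic description of the final choice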
0
java-sources/ai/libs/mlplan/0.2.1/ai/libs/mlplan
java-sources/ai/libs/mlplan/0.2.1/ai/libs/mlplan/core/MLPlanSimpleBuilder.java
package ai.libs.mlplan.core; import java.io.File; import java.io.IOException; import java.util.Random; import org.api4.java.ai.ml.classification.IClassifier; import ai.libs.jaicore.ml.classification.singlelabel.learner.MajorityClassifier; import ai.libs.jaicore.ml.core.filter.FilterBasedDatasetSplitter; import ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories.LabelBasedStratifiedSamplingFactory; public class MLPlanSimpleBuilder extends AbstractMLPlanBuilder<IClassifier, MLPlanSimpleBuilder> { public MLPlanSimpleBuilder() { try { this.withSearchSpaceConfigFile(new File("resources/mlplan/mlplan-simple.searchspace.json")); /* configure classifier factory */ this.withClassifierFactory(ci -> new MajorityClassifier()); /* configure dataset splitter */ this.withDatasetSplitterForSearchSelectionSplit(new FilterBasedDatasetSplitter<>(new LabelBasedStratifiedSamplingFactory<>(), .9, new Random(0))); this.withMCCVBasedCandidateEvaluationInSearchPhase().withNumMCIterations(3).withTrainFoldSize(.7); this.withMCCVBasedCandidateEvaluationInSelectionPhase().withNumMCIterations(3).withTrainFoldSize(.7); this.withRequestedInterface("AbstractClassifier"); } catch (IOException e) { throw new IllegalStateException("The resource file could not be found or accessed!", e); } } @Override public MLPlanSimpleBuilder getSelf() { return this; } }
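Given the defaults wired up in the constructor, the smallest possible end-to-end run needs nothing but a dataset (a placeholder below) and the bundled search space resource file:

IClassifier learner = new MLPlanSimpleBuilder()
        .withDataset(dataset) // any ILabeledDataset<?>
        .build()
        .call(); // candidates are majority-vote baselines evaluated by 3x MCCV with 70% train folds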
0
java-sources/ai/libs/mlplan/0.2.1/ai/libs/mlplan
java-sources/ai/libs/mlplan/0.2.1/ai/libs/mlplan/core/PipelineEvaluator.java
package ai.libs.mlplan.core; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance; import org.api4.java.ai.ml.core.evaluation.ISupervisedLearnerEvaluator; import org.api4.java.ai.ml.core.learner.ISupervisedLearner; import org.api4.java.common.attributedobjects.IInformedObjectEvaluatorExtension; import org.api4.java.common.attributedobjects.ObjectEvaluationFailedException; import org.api4.java.common.control.ILoggingCustomizable; import org.api4.java.common.event.IEvent; import org.api4.java.common.event.IEventEmitter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.eventbus.EventBus; import com.google.common.eventbus.Subscribe; import ai.libs.hasco.exceptions.ComponentInstantiationFailedException; import ai.libs.hasco.model.ComponentInstance; import ai.libs.jaicore.ml.scikitwrapper.ScikitLearnWrapper; import ai.libs.jaicore.timing.TimedObjectEvaluator; import ai.libs.mlplan.core.events.SupervisedLearnerCreatedEvent; /** * Evaluator used in the search phase of mlplan. * * @author fmohr */ public class PipelineEvaluator extends TimedObjectEvaluator<ComponentInstance, Double> implements IInformedObjectEvaluatorExtension<Double>, ILoggingCustomizable { private Logger logger = LoggerFactory.getLogger(PipelineEvaluator.class); private final EventBus eventBus = new EventBus(); private final ILearnerFactory<? extends ISupervisedLearner<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>>> learnerFactory; private final ISupervisedLearnerEvaluator<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> benchmark; private final int timeoutForEvaluation; private Double bestScore = 1.0; public PipelineEvaluator(final ILearnerFactory<? extends ISupervisedLearner<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>>> learnerFactory, final ISupervisedLearnerEvaluator<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> benchmark, final int timeoutForEvaluation) { super(); this.learnerFactory = learnerFactory; this.benchmark = benchmark; if (benchmark instanceof IEventEmitter) { ((IEventEmitter) benchmark).registerListener(this); } this.timeoutForEvaluation = timeoutForEvaluation; } @Override public String getLoggerName() { return this.logger.getName(); } @Override public void setLoggerName(final String name) { this.logger.info("Switching logger name from {} to {}", this.logger.getName(), name); this.logger = LoggerFactory.getLogger(name); if (this.benchmark instanceof ILoggingCustomizable) { this.logger.info("Setting logger name of actual benchmark {} to {}.benchmark", this.benchmark.getClass().getName(), name); ((ILoggingCustomizable) this.benchmark).setLoggerName(name + ".benchmark"); } else { this.logger.info("Benchmark {} does not implement ILoggingCustomizable, not customizing its logger.", this.benchmark.getClass().getName()); } } @SuppressWarnings("unchecked") @Override public Double evaluateSupervised(final ComponentInstance c) throws InterruptedException, ObjectEvaluationFailedException { this.logger.debug("Received request to evaluate component instance {}", c); try { if (this.benchmark instanceof IInformedObjectEvaluatorExtension) { ((IInformedObjectEvaluatorExtension<Double>) this.benchmark).informAboutBestScore(this.bestScore); } ISupervisedLearner<ILabeledInstance, ILabeledDataset<? 
extends ILabeledInstance>> learner = this.learnerFactory.getComponentInstantiation(c); this.eventBus.post(new SupervisedLearnerCreatedEvent(c, learner)); // inform listeners about the creation of the classifier if (this.logger.isDebugEnabled()) { this.logger.debug("Starting benchmark {} for classifier {}", this.benchmark, (learner instanceof ScikitLearnWrapper) ? learner.toString() : learner.getClass().getName()); } Double score = this.benchmark.evaluate(learner); if (this.logger.isInfoEnabled()) { this.logger.info("Obtained score {} for classifier {}", score, (learner instanceof ScikitLearnWrapper) ? learner.toString() : learner.getClass().getName()); } return score; } catch (ComponentInstantiationFailedException e) { throw new ObjectEvaluationFailedException("Evaluation of composition failed as the component instantiation could not be built.", e); } } @Override public void informAboutBestScore(final Double bestScore) { this.bestScore = bestScore; } @Override public long getTimeout(final ComponentInstance item) { return this.timeoutForEvaluation; } @Override public String getMessage(final ComponentInstance item) { return "Pipeline evaluation phase"; } public ISupervisedLearnerEvaluator<ILabeledInstance, ILabeledDataset<?>> getBenchmark() { return this.benchmark; } /** * Registers a listener on the internal event bus. Among others, listeners receive a coupling event (SupervisedLearnerCreatedEvent) that informs them about which ComponentInstance has been used to create which classifier. * * @param listener The listener to register. */ public void registerListener(final Object listener) { this.eventBus.register(listener); } /** * Forwards every incoming event to the listeners registered on the internal event bus. * * @param e The event to forward. */ @Subscribe public void receiveEvent(final IEvent e) { this.eventBus.post(e); } }
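Standalone use is a matter of passing a factory and a benchmark. The following sketch assumes a 'benchmark' built elsewhere (e.g. via MonteCarloCrossValidationEvaluatorFactory), a ComponentInstance 'ci' produced by HASCO, and evaluate(...) as inherited from TimedObjectEvaluator:

ILearnerFactory<IClassifier> factory = c -> new MajorityClassifier();
PipelineEvaluator evaluator = new PipelineEvaluator(factory, benchmark, 30000); // 30s per evaluation
evaluator.informAboutBestScore(0.25); // informed benchmarks may abort candidates that cannot improve on this
Double score = evaluator.evaluate(ci); // runs the benchmark under the configured timeout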
0
java-sources/ai/libs/mlplan/0.2.1/ai/libs/mlplan
java-sources/ai/libs/mlplan/0.2.1/ai/libs/mlplan/core/PipelineValidityCheckingNodeEvaluator.java
package ai.libs.mlplan.core; import java.util.Collection; import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.IPathEvaluator; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset; import ai.libs.hasco.model.Component; import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode; public abstract class PipelineValidityCheckingNodeEvaluator implements IPathEvaluator<TFDNode, String, Double> { private ILabeledDataset<?> data; private Collection<Component> components; public PipelineValidityCheckingNodeEvaluator() { } public PipelineValidityCheckingNodeEvaluator(final Collection<Component> components, final ILabeledDataset<?> data) { this.data = data; this.components = components; } public void setData(final ILabeledDataset<?> data) { this.data = data; } public void setComponents(final Collection<Component> components) { this.components = components; } public ILabeledDataset<?> getData() { return this.data; } public Collection<Component> getComponents() { return this.components; } }
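A hypothetical no-op subclass illustrating the contract: returning null expresses "no verdict", so the evaluator chain built in the builder falls through to the next evaluator, while a real checker would inspect getComponents() and getData() to veto invalid partial pipelines. Imports follow those of the PreferenceBasedNodeEvaluator below:

public class NoOpValidityChecker extends PipelineValidityCheckingNodeEvaluator {
    @Override
    public Double evaluate(final ILabeledPath<TFDNode, String> path) {
        // a real implementation would derive the partial pipeline from the path and
        // raise an objection if it cannot work on the dataset at hand
        return null; // no objection; defer to subsequent node evaluators
    }
}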
0
java-sources/ai/libs/mlplan/0.2.1/ai/libs/mlplan
java-sources/ai/libs/mlplan/0.2.1/ai/libs/mlplan/core/PreferenceBasedNodeEvaluator.java
package ai.libs.mlplan.core; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedList; import java.util.List; import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.IPathEvaluator; import org.api4.java.common.control.ILoggingCustomizable; import org.api4.java.datastructure.graph.ILabeledPath; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ai.libs.hasco.core.Util; import ai.libs.hasco.model.Component; import ai.libs.hasco.model.ComponentInstance; import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode; import ai.libs.jaicore.search.model.travesaltree.BackPointerPath; public class PreferenceBasedNodeEvaluator implements IPathEvaluator<TFDNode, String, Double>, ILoggingCustomizable { private final Collection<Component> components; private final List<String> orderingOfComponents; private Logger logger = LoggerFactory.getLogger(PreferenceBasedNodeEvaluator.class); private boolean sentLogMessageForHavingEnteredSecondSubPhase = false; private String methodPrefix = "resolveAbstractClassifierWith"; public PreferenceBasedNodeEvaluator(final Collection<Component> components, final List<String> orderingOfComponents) { super(); this.components = components; this.orderingOfComponents = orderingOfComponents; } public PreferenceBasedNodeEvaluator(final Collection<Component> components) { this(components, new ArrayList<>()); } public PreferenceBasedNodeEvaluator(final Collection<Component> components, final List<String> orderingOfComponents, final String methodPrefix) { this(components, orderingOfComponents); this.methodPrefix = methodPrefix; } @Override public Double evaluate(final ILabeledPath<TFDNode, String> n) { this.logger.info("Received request for node evaluation."); List<String> appliedMethods = new LinkedList<>(); for (TFDNode x : n.getNodes()) { if (x.getAppliedMethodInstance() != null) { appliedMethods.add(x.getAppliedMethodInstance().getMethod().getName()); } } this.logger.debug("Determined {} applied methods: {}", appliedMethods.size(), appliedMethods); /* get partial component */ ComponentInstance instance = Util.getSolutionCompositionFromState(this.components, n.getHead().getState(), false); boolean isPipeline = appliedMethods.stream().anyMatch(x -> x.toLowerCase().contains("pipeline")); boolean lastMethod = false; String classifierName = null; Double score = 0.0; this.logger.debug("The associated component instance is {}. Constitutes a pipeline? {}", instance, isPipeline ? "yes" : "no"); if (instance != null) { if (instance.getComponent().getName().toLowerCase().contains("pipeline")) { lastMethod = lastMethod || appliedMethods.get(appliedMethods.size() - 1).startsWith("resolveBaseClassifierWith"); if (instance.getSatisfactionOfRequiredInterfaces().containsKey("classifier")) { classifierName = instance.getSatisfactionOfRequiredInterfaces().get("classifier").getComponent().getName(); } else { return 0.0; } } else { classifierName = instance.getComponent().getName(); lastMethod = lastMethod || appliedMethods.get(appliedMethods.size() - 1).startsWith(this.methodPrefix); } if (lastMethod) { if (isPipeline) { score += this.orderingOfComponents.size() + 1; } score += (this.orderingOfComponents.contains(classifierName) ? 
this.orderingOfComponents.indexOf(classifierName) + 1 : this.orderingOfComponents.size() + 1); score *= 1.0e-10; } else { score = null; if (!this.sentLogMessageForHavingEnteredSecondSubPhase) { double scoreOfParent; if ((scoreOfParent = ((BackPointerPath<TFDNode, String, Double>)n.getPathToParentOfHead()).getScore()) > 1.0e-6) { this.sentLogMessageForHavingEnteredSecondSubPhase = true; this.logger.info("Entering phase 1b! Breadth-first search ends here, because the search is asking for the f-value of a node whose parent has been truly evaluated with an f-value of {}", scoreOfParent); } } } } return score; } @Override public String toString() { return "PreferenceBasedNodeEvaluator [ORDERING_OF_CLASSIFIERS=" + this.orderingOfComponents + "]"; } @Override public String getLoggerName() { return this.logger.getName(); } @Override public void setLoggerName(final String name) { this.logger = LoggerFactory.getLogger(name); } }
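Wiring a preference order into the search could then look as follows; the class names in the ordering are made up, and 'builder' is an AbstractMLPlanBuilder as shown earlier (its getComponents() loads the components of the configured search space):

List<String> ordering = Arrays.asList("weka.classifiers.trees.RandomForest", "weka.classifiers.functions.SMO"); // hypothetical names
PreferenceBasedNodeEvaluator ne = new PreferenceBasedNodeEvaluator(builder.getComponents(), ordering);
builder.withPreferredNodeEvaluator(ne); // earlier-listed components now receive better (smaller) scores in phase 1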
0
java-sources/ai/libs/mlplan/0.2.1/ai/libs/mlplan/core
java-sources/ai/libs/mlplan/0.2.1/ai/libs/mlplan/core/events/ClassifierFoundEvent.java
package ai.libs.mlplan.core.events; import java.util.HashMap; import java.util.Map; import org.api4.java.ai.ml.core.learner.ISupervisedLearner; import org.api4.java.algorithm.IAlgorithm; import org.api4.java.algorithm.events.result.IScoredSolutionCandidateFoundEvent; import ai.libs.hasco.model.ComponentInstance; import ai.libs.jaicore.basic.algorithm.ASolutionCandidateFoundEvent; import ai.libs.jaicore.logging.ToJSONStringUtil; public class ClassifierFoundEvent extends ASolutionCandidateFoundEvent<ISupervisedLearner<?, ?>> implements IScoredSolutionCandidateFoundEvent<ISupervisedLearner<?, ?>, Double> { private final double inSampleError; private final ComponentInstance componentDescription; public ClassifierFoundEvent(final IAlgorithm<?, ?> algorithm, final ComponentInstance componentDescription, final ISupervisedLearner<?, ?> solutionCandidate, final double inSampleError) { super(algorithm, solutionCandidate); this.inSampleError = inSampleError; this.componentDescription = componentDescription; } public double getInSampleError() { return this.inSampleError; } @Override public Double getScore() { return this.inSampleError; } public ComponentInstance getComponentDescription() { return this.componentDescription; } @Override public String toString() { Map<String, Object> fields = new HashMap<>(); fields.put("candidate", super.getSolutionCandidate()); fields.put("componentDescription", this.componentDescription); fields.put("inSampleError", this.inSampleError); return ToJSONStringUtil.toJSONString(fields); } }
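A consumption sketch: any object with such a Guava-annotated method can be registered as a listener on an event source that re-posts these events (the MLPlan class above is one):

@Subscribe
public void onClassifierFound(final ClassifierFoundEvent e) {
    // getComponentDescription() names the pipeline symbolically, getScore() is its in-sample error
    System.out.println(e.getComponentDescription() + " scored " + e.getScore());
}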
0
java-sources/ai/libs/mlplan/0.2.1/ai/libs/mlplan/core
java-sources/ai/libs/mlplan/0.2.1/ai/libs/mlplan/core/events/MLPlanPhaseSwitchedEvent.java
package ai.libs.mlplan.core.events; import org.api4.java.algorithm.IAlgorithm; import ai.libs.jaicore.basic.algorithm.AAlgorithmEvent; public class MLPlanPhaseSwitchedEvent extends AAlgorithmEvent { public MLPlanPhaseSwitchedEvent(final IAlgorithm<?, ?> algorithm) { super(algorithm); } }
0
java-sources/ai/libs/mlplan/0.2.1/ai/libs/mlplan/core
java-sources/ai/libs/mlplan/0.2.1/ai/libs/mlplan/core/events/SupervisedLearnerCreatedEvent.java
package ai.libs.mlplan.core.events; import org.api4.java.ai.ml.core.learner.ISupervisedLearner; import org.api4.java.common.event.IEvent; import ai.libs.hasco.model.ComponentInstance; public class SupervisedLearnerCreatedEvent implements IEvent { private final ComponentInstance instance; private final ISupervisedLearner<?, ?> classifier; private final long timestamp = System.currentTimeMillis(); public SupervisedLearnerCreatedEvent(final ComponentInstance instance, final ISupervisedLearner<?, ?> classifier) { super(); this.instance = instance; this.classifier = classifier; } public ComponentInstance getInstance() { return this.instance; } public ISupervisedLearner<?, ?> getClassifier() { return this.classifier; } @Override public long getTimestamp() { return this.timestamp; } }
0
java-sources/ai/libs/mlplan/0.2.1/ai/libs/mlplan
java-sources/ai/libs/mlplan/0.2.1/ai/libs/mlplan/multiclass/MLPlanClassifierConfig.java
package ai.libs.mlplan.multiclass; import java.io.File; import org.aeonbits.owner.Config.Sources; import ai.libs.hasco.variants.forwarddecomposition.twophase.TwoPhaseHASCOConfig; @Sources({ "file:conf/mlplan.properties" }) public interface MLPlanClassifierConfig extends TwoPhaseHASCOConfig { public static final String PREFERRED_COMPONENTS = "mlplan.preferredComponents"; public static final String SELECTION_PORTION = "mlplan.selectionportion"; @Key(SELECTION_PORTION) @DefaultValue("0.3") public double dataPortionForSelection(); @Key(PREFERRED_COMPONENTS) @DefaultValue("conf/mlplan/precedenceList.txt") public File preferredComponents(); }
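For reference, an owner-based config interface like this one can be instantiated directly via ConfigFactory, the same mechanism the builders below use. A minimal sketch (ConfigSketch is an illustrative name; only the two keys declared in this interface are shown, further keys are inherited from TwoPhaseHASCOConfig):

import org.aeonbits.owner.ConfigFactory;

import ai.libs.mlplan.multiclass.MLPlanClassifierConfig;

public class ConfigSketch {

	public static void main(final String[] args) {
		// Defaults from the @DefaultValue annotations apply unless the file
		// bound via @Sources (conf/mlplan.properties) overrides them.
		MLPlanClassifierConfig config = ConfigFactory.create(MLPlanClassifierConfig.class);
		System.out.println("Portion of data for selection: " + config.dataPortionForSelection()); // 0.3 by default
		System.out.println("Preferred components file: " + config.preferredComponents()); // conf/mlplan/precedenceList.txt by default
	}
}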
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/core/AMLPlanBuilder.java
package ai.libs.mlplan.core; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Objects; import java.util.concurrent.TimeUnit; import java.util.function.Predicate; import org.aeonbits.owner.ConfigFactory; import org.api4.java.ai.graphsearch.problem.IOptimalPathInORGraphSearchFactory; import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.IPathEvaluator; import org.api4.java.ai.ml.core.dataset.splitter.IFoldSizeConfigurableRandomDatasetSplitter; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance; import org.api4.java.ai.ml.core.evaluation.supervised.loss.IDeterministicPredictionPerformanceMeasure; import org.api4.java.ai.ml.core.learner.ISupervisedLearner; import org.api4.java.algorithm.Timeout; import org.api4.java.common.control.ILoggingCustomizable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ai.libs.hasco.builder.forwarddecomposition.DefaultPathPriorizingPredicate; import ai.libs.hasco.builder.forwarddecomposition.HASCOViaFDBuilder; import ai.libs.hasco.twophase.HASCOWithRandomCompletionsConfig; import ai.libs.jaicore.basic.FileUtil; import ai.libs.jaicore.basic.IOwnerBasedAlgorithmConfig; import ai.libs.jaicore.basic.IOwnerBasedRandomConfig; import ai.libs.jaicore.basic.ResourceFile; import ai.libs.jaicore.basic.ResourceUtil; import ai.libs.jaicore.basic.algorithm.reduction.AlgorithmicProblemReduction; import ai.libs.jaicore.basic.reconstruction.ReconstructionUtil; import ai.libs.jaicore.basic.sets.SetUtil; import ai.libs.jaicore.components.api.IComponentRepository; import ai.libs.jaicore.components.api.INumericParameterRefinementConfigurationMap; import ai.libs.jaicore.components.serialization.ComponentSerialization; import ai.libs.jaicore.ml.core.evaluation.evaluator.factory.ISupervisedLearnerEvaluatorFactory; import ai.libs.jaicore.ml.core.evaluation.evaluator.factory.MonteCarloCrossValidationEvaluatorFactory; import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode; import ai.libs.mlplan.multiclass.IMLPlanClassifierConfig; import ai.libs.mlplan.safeguard.IEvaluationSafeGuardFactory; /** * The MLPlanBuilder helps to easily configure and initialize ML-Plan with specific parameter settings. For convenient use, the MLPlanBuilder also offers methods for initializing ML-Plan with default configuration to use ML-Plan for single * label classification in combination with WEKA or scikit-learn or for multi-label classification in combination with MEKA and consequently with WEKA (for baselearners of multi-label reduction strategies). * * @author Felix Mohr, Marcel Wever */ public abstract class AMLPlanBuilder<L extends ISupervisedLearner<ILabeledInstance, ILabeledDataset<? 
extends ILabeledInstance>>, B extends AMLPlanBuilder<L, B>> implements IMLPlanBuilder<L, B>, ILoggingCustomizable { /* Logging */ private Logger logger = LoggerFactory.getLogger(AMLPlanBuilder.class); private String loggerName = AMLPlanBuilder.class.getName(); private static final String RES_ALGORITHM_CONFIG = "mlplan/mlplan.properties"; private static final String FS_ALGORITHM_CONFIG = "conf/mlplan.properties"; /* Default configuration values */ private static final File DEF_ALGORITHM_CONFIG = FileUtil.getExistingFileWithHighestPriority(RES_ALGORITHM_CONFIG, FS_ALGORITHM_CONFIG); /* problem description aspects */ private final ComponentSerialization serializer = new ComponentSerialization(); private File searchSpaceFile; private String requestedHASCOInterface; private String nameOfHASCOMethodToResolveBareLearner; private String nameOfHASCOMethodToResolverLearnerInPipeline; private ILearnerFactory<L> learnerFactory; private ILabeledDataset<?> dataset; /* other general properties of ML-Plan */ private IMLPlanClassifierConfig algorithmConfig; /* node evaluation and search guidance */ private Predicate<TFDNode> priorizingPredicate = new DefaultPathPriorizingPredicate<>(); // by default, we prefer paths that lead to default parametrizations private List<IPathEvaluator<TFDNode, String, Double>> preferredNodeEvaluators = new ArrayList<>(); private PipelineValidityCheckingNodeEvaluator pipelineValidityCheckingNodeEvaluator; /* Candidate Evaluation (if no other node evaluation is used) */ private IFoldSizeConfigurableRandomDatasetSplitter<ILabeledDataset<?>> searchSelectionDatasetSplitter; private IDeterministicPredictionPerformanceMeasure<?, ?> metricForSearchPhase; private IDeterministicPredictionPerformanceMeasure<?, ?> metricForSelectionPhase; private ISupervisedLearnerEvaluatorFactory<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> factoryForPipelineEvaluationInSearchPhase = this.getMCCVFactory(5, .7); private ISupervisedLearnerEvaluatorFactory<ILabeledInstance, ILabeledDataset<? 
extends ILabeledInstance>> factoryForPipelineEvaluationInSelectionPhase = this.getMCCVFactory(5, .7); private IEvaluationSafeGuardFactory safeGuard = null; protected AMLPlanBuilder() { super(); this.withAlgorithmConfigFile(DEF_ALGORITHM_CONFIG); this.withSeed(0); } protected AMLPlanBuilder(final IProblemType<L> problemType) throws IOException { super(); this.withAlgorithmConfigFile(DEF_ALGORITHM_CONFIG); this.withProblemType(problemType); this.withSeed(0); } public AMLPlanBuilder<L, B> withProblemType(final IProblemType<L> problemType) throws IOException { if (this.logger.isInfoEnabled()) { this.logger.info("Setting problem type to {}.", problemType.getName()); } this.withSearchSpaceConfigFile(FileUtil.getExistingFileWithHighestPriority(problemType.getSearchSpaceConfigFileFromResource(), problemType.getSearchSpaceConfigFromFileSystem())); this.withRequestedInterface(problemType.getRequestedInterface()); this.withLearnerFactory(problemType.getLearnerFactory()); /* setup everything for preferred components */ if (problemType.getPreferredComponentListFromResource() != null || problemType.getPreferredComponentListFromFileSystem() != null) { boolean relevantFileAvailable = true; if (problemType.getPreferredComponentListFromResource() == null) { relevantFileAvailable = new File(problemType.getPreferredComponentListFromFileSystem()).exists(); } if (relevantFileAvailable) { this.withPreferredComponentsFile(FileUtil.getExistingFileWithHighestPriority(problemType.getPreferredComponentListFromResource(), problemType.getPreferredComponentListFromFileSystem())); this.nameOfHASCOMethodToResolveBareLearner = problemType.getLastHASCOMethodPriorToParameterRefinementOfBareLearner(); this.nameOfHASCOMethodToResolverLearnerInPipeline = problemType.getLastHASCOMethodPriorToParameterRefinementOfPipeline(); } } this.withPipelineValidityCheckingNodeEvaluator(problemType.getValidityCheckingNodeEvaluator()); /* configure the metric defined in the problem type */ this.withPerformanceMeasureForSearchPhase(problemType.getPerformanceMetricForSearchPhase()); this.withPerformanceMeasureForSelectionPhase(problemType.getPerformanceMetricForSelectionPhase()); this.searchSelectionDatasetSplitter = problemType.getSearchSelectionDatasetSplitter(); return this.getSelf(); } public B withPerformanceMeasureForSearchPhase(final IDeterministicPredictionPerformanceMeasure<?, ?> performanceMeasure) { this.metricForSearchPhase = performanceMeasure; return this.getSelf(); } public B withPerformanceMeasureForSelectionPhase(final IDeterministicPredictionPerformanceMeasure<?, ?> performanceMeasure) { this.metricForSelectionPhase = performanceMeasure; return this.getSelf(); } public B withPerformanceMeasure(final IDeterministicPredictionPerformanceMeasure<?, ?> performanceMeasure) { this.withPerformanceMeasureForSearchPhase(performanceMeasure); this.withPerformanceMeasureForSelectionPhase(performanceMeasure); return this.getSelf(); } @Override public IDeterministicPredictionPerformanceMeasure<?, ?> getMetricForSearchPhase() { return this.metricForSearchPhase; } @Override public IDeterministicPredictionPerformanceMeasure<?, ?> getMetricForSelectionPhase() { return this.metricForSelectionPhase; } /** * Creates a preferred node evaluator that can be used to prefer components over other components. * * @param preferredComponentsFile * The file containing a priority list of component names. * @return The builder object. * @throws IOException * Thrown if a problem occurs while trying to read the file containing the priority list. 
*/ public B withPreferredComponentsFile(final File preferredComponentsFile) throws IOException { this.getAlgorithmConfig().setProperty(IMLPlanClassifierConfig.PREFERRED_COMPONENTS, preferredComponentsFile.getAbsolutePath()); List<String> namesOfPreferredComponents = null; // the order is important! if (preferredComponentsFile instanceof ResourceFile) { namesOfPreferredComponents = ResourceUtil.readResourceFileToStringList((ResourceFile) preferredComponentsFile); } else if (!preferredComponentsFile.exists()) { this.logger.warn("The configured file for preferred components \"{}\" does not exist. Not using any particular ordering.", preferredComponentsFile.getAbsolutePath()); } else { namesOfPreferredComponents = FileUtil.readFileAsList(preferredComponentsFile); } if (namesOfPreferredComponents != null) { this.withPreferredComponents(namesOfPreferredComponents); } return this.getSelf(); } public B withPreferredComponents(final List<String> preferredComponents) { this.getAlgorithmConfig().setProperty(IMLPlanClassifierConfig.PREFERRED_COMPONENTS, "" + SetUtil.implode(preferredComponents, ", ")); return this.getSelf(); } public List<String> getPreferredComponents() { return this.getAlgorithmConfig().preferredComponents(); } /** * This adds a new preferred node evaluator * * It is possible to specify several preferred node evaluators, which will be ordered by the order in which they are specified. The latest given evaluator is the most preferred one. * * @param preferredNodeEvaluator * @return */ public B withPreferredNodeEvaluator(final IPathEvaluator<TFDNode, String, Double> preferredNodeEvaluator) { this.preferredNodeEvaluators.add(preferredNodeEvaluator); return this.getSelf(); } public List<IPathEvaluator<TFDNode, String, Double>> getPreferredNodeEvaluators() { return Collections.unmodifiableList(this.preferredNodeEvaluators); } public B withSearchFactory(@SuppressWarnings("rawtypes") final IOptimalPathInORGraphSearchFactory searchFactory, @SuppressWarnings("rawtypes") final AlgorithmicProblemReduction transformer) { throw new UnsupportedOperationException("Currently only support for BestFirst search. Will be extended in the upcoming release."); } public IComponentRepository getComponents() throws IOException { return this.serializer.deserializeRepository(this.searchSpaceFile); } public INumericParameterRefinementConfigurationMap getComponentParameterConfigurations() throws IOException { return this.serializer.deserializeParamMap(this.searchSpaceFile); } /** * Loads the MLPlanClassifierConfig with default values and replaces all properties according to the properties defined in the given config file. * * @param algorithmConfigFile * The file specifying the property values to replace the default configuration. * @return The MLPlanBuilder object. * @throws IOException * An IOException is thrown if there are issues reading the config file. */ public B withAlgorithmConfigFile(final File algorithmConfigFile) { return this.withAlgorithmConfig((IMLPlanClassifierConfig) ConfigFactory.create(IMLPlanClassifierConfig.class).loadPropertiesFromFile(algorithmConfigFile)); } /** * Loads the MLPlanClassifierConfig with default values and replaces all properties according to the properties defined in the given config file. * * @param config * The algorithm configuration. * @return The MLPlanBuilder object. * @throws IOException * An IOException is thrown if there are issues reading the config file. 
*/ public B withAlgorithmConfig(final IMLPlanClassifierConfig config) { this.algorithmConfig = config; return this.getSelf(); } /** * Set the data for which ML-Plan is supposed to find the best pipeline. * * @param dataset * The dataset for which ML-Plan is to be run. * @return The builder object. */ public B withDataset(final ILabeledDataset<?> dataset) { if (!ReconstructionUtil.areInstructionsNonEmptyIfReconstructibilityClaimed(dataset)) { this.logger.warn("The dataset claims to be reconstructible, but it does not carry any instructions."); } this.dataset = dataset; if (dataset.stream().anyMatch(i -> i.getLabel() == null)) { this.logger.warn("Dataset has instances without label. Dropping those lines!! Number of instances now: {}", this.dataset.size()); this.dataset.removeIf(i -> i.getLabel() == null); this.logger.warn("Dataset is now reduced. Number of instances now: {}", this.dataset.size()); } return this.getSelf(); } public ILabeledDataset<?> getDataset() { return this.dataset; } /** * Specify the search space in which ML-Plan is required to work. * * @param searchSpaceConfig * The file of the search space configuration. * @return The builder object. * @throws IOException * Thrown if the given file does not exist. */ public B withSearchSpaceConfigFile(final File searchSpaceConfig) throws IOException { FileUtil.requireFileExists(searchSpaceConfig); this.searchSpaceFile = searchSpaceConfig; this.logger.info("The search space configuration file has been set to {}.", searchSpaceConfig.getCanonicalPath()); return this.getSelf(); } /** * Set the classifier factory that translates <code>CompositionInstance</code> objects to classifiers that can be evaluated. * * @param classifierFactory * The classifier factory to be used to translate CompositionInstance objects to classifiers. * @return The builder object. */ public B withLearnerFactory(final ILearnerFactory<L> classifierFactory) { this.learnerFactory = classifierFactory; return this.getSelf(); } /** * Set the dataset splitter that is used for generating the holdout data portion that is put aside during search. * * @param datasetSplitter * The dataset splitter to be used. * @return The builder object. */ public B withDatasetSplitterForSearchSelectionSplit(final IFoldSizeConfigurableRandomDatasetSplitter<ILabeledDataset<?>> datasetSplitter) { this.searchSelectionDatasetSplitter = datasetSplitter; return this.getSelf(); } public B withRequestedInterface(final String requestedInterface) { this.requestedHASCOInterface = requestedInterface; return this.getSelf(); } /** * @param timeout * The timeout for ML-Plan to search for the best classifier. * @return The builder object. */ public B withTimeOut(final Timeout timeout) { this.algorithmConfig.setProperty(IOwnerBasedAlgorithmConfig.K_TIMEOUT, timeout.milliseconds() + ""); return this.getSelf(); } /** * @return The timeout for ML-Plan to search for the best classifier. */ public Timeout getTimeOut() { return new Timeout(this.algorithmConfig.timeout(), TimeUnit.MILLISECONDS); } public B withTimeoutPrecautionOffsetInSeconds(final int seconds) { this.algorithmConfig.setProperty(IMLPlanClassifierConfig.PRECAUTION_OFFSET, "" + seconds); return this.getSelf(); } public int getTimeoutPrecautionOffsetInSeconds() { return this.algorithmConfig.precautionOffset(); } /** * @param timeout * The timeout for a single node evaluation. * @return The builder object.
*/ public B withNodeEvaluationTimeOut(final Timeout timeout) { this.algorithmConfig.setProperty(HASCOWithRandomCompletionsConfig.K_RANDOM_COMPLETIONS_TIMEOUT_NODE, timeout.milliseconds() + ""); return this.getSelf(); } /** * @return The timeout for a single node evaluation. */ public Timeout getNodeEvaluationTimeOut() { return new Timeout(this.algorithmConfig.timeoutForNodeEvaluation(), TimeUnit.MILLISECONDS); } /** * @param timeout * The timeout for a single candidate evaluation. * @return The builder object. */ public B withCandidateEvaluationTimeOut(final Timeout timeout) { this.algorithmConfig.setProperty(HASCOWithRandomCompletionsConfig.K_RANDOM_COMPLETIONS_TIMEOUT_PATH, timeout.milliseconds() + ""); return this.getSelf(); } /** * @return The timeout for a single candidate evaluation. */ public Timeout getCandidateEvaluationTimeOut() { return new Timeout(this.algorithmConfig.timeoutForCandidateEvaluation(), TimeUnit.MILLISECONDS); } public B withMCCVBasedCandidateEvaluationInSearchPhase(final int numIterations, final double trainPortion) { this.factoryForPipelineEvaluationInSearchPhase = this.getMCCVFactory(numIterations, trainPortion); return this.getSelf(); } public B withMCCVBasedCandidateEvaluationInSelectionPhase(final int numIterations, final double trainPortion) { this.factoryForPipelineEvaluationInSelectionPhase = this.getMCCVFactory(numIterations, trainPortion); return this.getSelf(); } private MonteCarloCrossValidationEvaluatorFactory getMCCVFactory(final int numIterations, final double trainPortion) { MonteCarloCrossValidationEvaluatorFactory factory = new MonteCarloCrossValidationEvaluatorFactory(); factory.withNumMCIterations(numIterations).withTrainFoldSize(trainPortion); return factory; } @Override public ISupervisedLearnerEvaluatorFactory<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> getLearnerEvaluationFactoryForSearchPhase() { return this.factoryForPipelineEvaluationInSearchPhase; } @Override public ISupervisedLearnerEvaluatorFactory<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> getLearnerEvaluationFactoryForSelectionPhase() { return this.factoryForPipelineEvaluationInSelectionPhase; } /** * Sets the evaluator factory for the search phase. * * @param evaluatorFactory * The evaluator factory for the search phase. */ public void withSearchPhaseEvaluatorFactory(final ISupervisedLearnerEvaluatorFactory<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> evaluatorFactory) { this.factoryForPipelineEvaluationInSearchPhase = evaluatorFactory; } /** * @return The factory for the classifier evaluator of the search phase. */ protected ISupervisedLearnerEvaluatorFactory<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> getSearchEvaluatorFactory() { return this.factoryForPipelineEvaluationInSearchPhase; } /** * Sets the evaluator factory for the selection phase. * * @param evaluatorFactory * The evaluator factory for the selection phase. * @return The builder object. */ public B withSelectionPhaseEvaluatorFactory(final ISupervisedLearnerEvaluatorFactory<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> evaluatorFactory) { this.factoryForPipelineEvaluationInSelectionPhase = evaluatorFactory; return this.getSelf(); } /** * Sets the number of cpus that may be used by ML-Plan. * * @param numCpus * The number of cpus to use. * @return The builder object.
*/ public B withNumCpus(final int numCpus) { this.algorithmConfig.setProperty(IOwnerBasedAlgorithmConfig.K_CPUS, numCpus + ""); return this.getSelf(); } public B withSeed(final long seed) { this.algorithmConfig.setProperty(IOwnerBasedRandomConfig.K_SEED, seed + ""); this.logger.info("Seed has been set to {}", seed); return this.getSelf(); } /** * @return The factory for the classifier evaluator of the selection phase. */ protected ISupervisedLearnerEvaluatorFactory<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> getSelectionEvaluatorFactory() { return this.factoryForPipelineEvaluationInSelectionPhase; } @Override public HASCOViaFDBuilder<Double, ?> getHASCOFactory() { return MLPlanUtil.getHASCOBuilder(this.algorithmConfig, this.dataset, this.searchSpaceFile, this.requestedHASCOInterface, this.priorizingPredicate, this.preferredNodeEvaluators, this.pipelineValidityCheckingNodeEvaluator, this.nameOfHASCOMethodToResolveBareLearner, this.nameOfHASCOMethodToResolverLearnerInPipeline); } @Override public ILearnerFactory<L> getLearnerFactory() { return this.learnerFactory; } @Override public String getLoggerName() { return this.loggerName; } @Override public void setLoggerName(final String name) { this.logger = LoggerFactory.getLogger(name); this.serializer.setLoggerName(name + ".serializer"); this.loggerName = name; } @Override public String getRequestedInterface() { return this.requestedHASCOInterface; } @Override public IFoldSizeConfigurableRandomDatasetSplitter<ILabeledDataset<?>> getSearchSelectionDatasetSplitter() { return this.searchSelectionDatasetSplitter; } @Override public File getSearchSpaceConfigFile() { return this.searchSpaceFile; } @Override public IMLPlanClassifierConfig getAlgorithmConfig() { return this.algorithmConfig; } public B withPipelineValidityCheckingNodeEvaluator(final PipelineValidityCheckingNodeEvaluator ne) { this.pipelineValidityCheckingNodeEvaluator = ne; return this.getSelf(); } public PipelineValidityCheckingNodeEvaluator getPipelineValidityCheckingNodeEvaluator() { return this.pipelineValidityCheckingNodeEvaluator; } public B withPortionOfDataReservedForSelection(final double value) { this.algorithmConfig.setProperty(IMLPlanClassifierConfig.SELECTION_PORTION, value + ""); return this.getSelf(); } @Override public double getPortionOfDataReservedForSelectionPhase() { return this.algorithmConfig.dataPortionForSelection(); } public B withSafeGuardFactory(final IEvaluationSafeGuardFactory safeGuard) { this.safeGuard = safeGuard; return this.getSelf(); } @Override public IEvaluationSafeGuardFactory getSafeGuardFactory() { return this.safeGuard; } /** * Builds an ML-Plan object for the given dataset as input. * * @param dataset * The dataset for which an ML-Plan object is to be built. * @return The ML-Plan object configured with this builder. 
* @throws InterruptedException */ public MLPlan<L> build(final ILabeledDataset<?> dataset) throws InterruptedException { return this.withDataset(dataset).build(); } public void checkPreconditionsForInitialization() { /* check proper problem definition */ Objects.requireNonNull(this.searchSpaceFile, "No search space file defined."); Objects.requireNonNull(this.requestedHASCOInterface, "No requested HASCO interface defined!"); /* check that data is available */ Objects.requireNonNull(this.dataset, "A dataset needs to be provided as input to ML-Plan"); /* check that the evaluation factories and the search/selection splitter are defined */ Objects.requireNonNull(this.learnerFactory, "The learner factory has not been set."); Objects.requireNonNull(this.factoryForPipelineEvaluationInSearchPhase, "Factory for pipeline evaluation in search phase is not set!"); Objects.requireNonNull(this.factoryForPipelineEvaluationInSelectionPhase, "Factory for pipeline evaluation in selection phase is not set!"); Objects.requireNonNull(this.searchSelectionDatasetSplitter, "Dataset splitter for search phase must be set!"); } /** * Builds an ML-Plan object with the dataset provided earlier to this builder. * * @return The ML-Plan object configured with this builder. * @throws InterruptedException */ public MLPlan<L> build() throws InterruptedException { this.checkPreconditionsForInitialization(); return new MLPlan<>(this, this.dataset); } }
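To see the fluent API above in action, here is a minimal configuration sketch. It uses the MLPlanSimpleBuilder shown further below in this repository; the dataset is taken as a parameter because data loading is out of scope here, and the class name is illustrative:

import java.io.IOException;
import java.util.concurrent.TimeUnit;

import org.api4.java.ai.ml.classification.IClassifier;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;
import org.api4.java.algorithm.Timeout;

import ai.libs.mlplan.core.MLPlan;
import ai.libs.mlplan.core.MLPlanSimpleBuilder;

public class BuilderUsageSketch {

	// Chains a few of the builder methods defined above. Every withX method
	// returns the concrete builder type B, which is what makes the chain typecheck.
	public static MLPlan<IClassifier> configure(final ILabeledDataset<?> dataset) throws IOException, InterruptedException {
		return new MLPlanSimpleBuilder() //
				.withSeed(42) //
				.withNumCpus(4) //
				.withTimeOut(new Timeout(300, TimeUnit.SECONDS)) //
				.withMCCVBasedCandidateEvaluationInSearchPhase(5, 0.7) //
				.build(dataset);
	}
}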
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/core/ILearnerFactory.java
package ai.libs.mlplan.core; import org.api4.java.ai.ml.core.learner.ISupervisedLearner; import ai.libs.jaicore.components.optimizingfactory.BaseFactory; public interface ILearnerFactory<L extends ISupervisedLearner<?, ?>> extends BaseFactory<L> { }
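A trivially small implementation of this interface, in the spirit of the anonymous factory used by MLPlanSimpleBuilder further below (the class name is illustrative):

import org.api4.java.ai.ml.classification.IClassifier;

import ai.libs.jaicore.components.api.IComponentInstance;
import ai.libs.jaicore.components.exceptions.ComponentInstantiationFailedException;
import ai.libs.jaicore.ml.classification.singlelabel.learner.MajorityClassifier;
import ai.libs.mlplan.core.ILearnerFactory;

public class MajorityLearnerFactory implements ILearnerFactory<IClassifier> {

	// Ignores the component description entirely and always instantiates a
	// majority-class baseline; real factories translate the component instance
	// (chosen component plus parameter values) into a configured learner.
	@Override
	public IClassifier getComponentInstantiation(final IComponentInstance groundComponent) throws ComponentInstantiationFailedException {
		return new MajorityClassifier();
	}
}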
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/core/IMLPlanBuilder.java
package ai.libs.mlplan.core; import java.io.File; import org.api4.java.ai.ml.core.dataset.splitter.IFoldSizeConfigurableRandomDatasetSplitter; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance; import org.api4.java.ai.ml.core.evaluation.supervised.loss.IDeterministicPredictionPerformanceMeasure; import org.api4.java.ai.ml.core.learner.ISupervisedLearner; import ai.libs.hasco.builder.forwarddecomposition.HASCOViaFDBuilder; import ai.libs.jaicore.ml.core.evaluation.evaluator.factory.ISupervisedLearnerEvaluatorFactory; import ai.libs.jaicore.ml.core.evaluation.evaluator.factory.LearnerEvaluatorConstructionFailedException; import ai.libs.mlplan.multiclass.IMLPlanClassifierConfig; import ai.libs.mlplan.safeguard.IEvaluationSafeGuardFactory; /** * The IMLPlanBuilder provides the general interface of an ML-Plan builder independent * of the problem domain or specific library that is used for the configuration of machine * learning pipelines. * * @author mwever * @author fmohr * */ public interface IMLPlanBuilder<L extends ISupervisedLearner<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>>, B extends IMLPlanBuilder<L, B>> { /** * This is the splitter that splits the given input data into data for the search phase and for the selection phase * @return */ public IFoldSizeConfigurableRandomDatasetSplitter<ILabeledDataset<?>> getSearchSelectionDatasetSplitter(); /** * This is the factory that will be used to create the pipeline evaluators for evaluation during search time * @return * @throws LearnerEvaluatorConstructionFailedException */ public ISupervisedLearnerEvaluatorFactory<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> getLearnerEvaluationFactoryForSearchPhase(); public IDeterministicPredictionPerformanceMeasure<?, ?> getMetricForSearchPhase(); /** * This is the factory that will be used to create the pipeline evaluators for evaluation during selection time * @return * @throws LearnerEvaluatorConstructionFailedException */ public ISupervisedLearnerEvaluatorFactory<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> getLearnerEvaluationFactoryForSelectionPhase(); public IDeterministicPredictionPerformanceMeasure<?, ?> getMetricForSelectionPhase(); public String getRequestedInterface(); public File getSearchSpaceConfigFile(); public ILearnerFactory<L> getLearnerFactory(); public HASCOViaFDBuilder<Double, ?> getHASCOFactory(); public IMLPlanClassifierConfig getAlgorithmConfig(); public IEvaluationSafeGuardFactory getSafeGuardFactory(); public double getPortionOfDataReservedForSelectionPhase(); public B getSelf(); }
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/core/IProblemType.java
package ai.libs.mlplan.core; import org.api4.java.ai.ml.core.dataset.splitter.IFoldSizeConfigurableRandomDatasetSplitter; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance; import org.api4.java.ai.ml.core.evaluation.supervised.loss.IDeterministicPredictionPerformanceMeasure; import org.api4.java.ai.ml.core.learner.ISupervisedLearner; public interface IProblemType<L extends ISupervisedLearner<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>>> { public String getName(); public String getSearchSpaceConfigFileFromResource(); public String getSearchSpaceConfigFromFileSystem(); public String getRequestedInterface(); public String getPreferredComponentListFromResource(); public String getPreferredComponentListFromFileSystem(); public String getLastHASCOMethodPriorToParameterRefinementOfBareLearner(); public String getLastHASCOMethodPriorToParameterRefinementOfPipeline(); public PipelineValidityCheckingNodeEvaluator getValidityCheckingNodeEvaluator(); public ILearnerFactory<L> getLearnerFactory(); public IDeterministicPredictionPerformanceMeasure<?, ?> getPerformanceMetricForSearchPhase(); public IDeterministicPredictionPerformanceMeasure<?, ?> getPerformanceMetricForSelectionPhase(); public IFoldSizeConfigurableRandomDatasetSplitter<ILabeledDataset<?>> getSearchSelectionDatasetSplitter(); }
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/core/ITimeTrackingLearner.java
package ai.libs.mlplan.core; import java.util.List; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance; import org.api4.java.ai.ml.core.learner.ISupervisedLearner; import ai.libs.jaicore.components.api.IComponentInstance; public interface ITimeTrackingLearner extends ISupervisedLearner<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> { public ISupervisedLearner<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> getLearner(); public List<Long> getFitTimes(); public List<Long> getBatchPredictionTimesInMS(); public List<Long> getInstancePredictionTimesInMS(); public IComponentInstance getComponentInstance(); public void setPredictedInductionTime(final String inductionTime); public void setPredictedInferenceTime(final String inferenceTime); public Double getPredictedInductionTime(); public Double getPredictedInferenceTime(); public void setScore(Double score); public Double getScore(); }
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/core/MLPlan.java
package ai.libs.mlplan.core; import java.io.IOException; import java.util.Objects; import java.util.Random; import java.util.concurrent.TimeUnit; import org.api4.java.ai.graphsearch.problem.IPathSearchInput; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance; import org.api4.java.ai.ml.core.learner.ISupervisedLearner; import org.api4.java.algorithm.IAlgorithm; import org.api4.java.algorithm.Timeout; import org.api4.java.algorithm.events.IAlgorithmEvent; import org.api4.java.algorithm.exceptions.AlgorithmException; import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException; import org.api4.java.algorithm.exceptions.AlgorithmTimeoutedException; import org.api4.java.common.control.ILoggingCustomizable; import org.api4.java.common.event.IEvent; import org.api4.java.common.reconstruction.IReconstructible; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.eventbus.Subscribe; import ai.libs.hasco.builder.TwoPhaseHASCOBuilder; import ai.libs.hasco.builder.forwarddecomposition.HASCOViaFDBuilder; import ai.libs.hasco.core.HASCO; import ai.libs.hasco.core.HASCOSolutionCandidate; import ai.libs.hasco.core.events.HASCOSolutionEvent; import ai.libs.hasco.core.events.TwoPhaseHASCOPhaseSwitchEvent; import ai.libs.hasco.twophase.TwoPhaseHASCO; import ai.libs.hasco.twophase.TwoPhaseHASCOConfig; import ai.libs.hasco.twophase.TwoPhaseSoftwareConfigurationProblem; import ai.libs.jaicore.basic.MathExt; import ai.libs.jaicore.basic.algorithm.AAlgorithm; import ai.libs.jaicore.basic.algorithm.AlgorithmFinishedEvent; import ai.libs.jaicore.basic.algorithm.AlgorithmInitializedEvent; import ai.libs.jaicore.basic.algorithm.EAlgorithmState; import ai.libs.jaicore.basic.reconstruction.ReconstructionUtil; import ai.libs.jaicore.basic.sets.Pair; import ai.libs.jaicore.components.api.IComponentInstance; import ai.libs.jaicore.components.exceptions.ComponentInstantiationFailedException; import ai.libs.jaicore.components.optimizingfactory.OptimizingFactory; import ai.libs.jaicore.components.optimizingfactory.OptimizingFactoryProblem; import ai.libs.jaicore.components.serialization.ComponentSerialization; import ai.libs.jaicore.ml.core.dataset.DatasetUtil; import ai.libs.jaicore.ml.core.evaluation.evaluator.factory.LearnerEvaluatorConstructionFailedException; import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode; import ai.libs.mlplan.core.events.ClassifierFoundEvent; import ai.libs.mlplan.core.events.MLPlanPhaseSwitchedEvent; import ai.libs.mlplan.multiclass.IMLPlanClassifierConfig; public class MLPlan<L extends ISupervisedLearner<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>>> extends AAlgorithm<ILabeledDataset<?>, L> implements ILoggingCustomizable { /** Logger for controlled output. 
*/ private Logger logger = LoggerFactory.getLogger(MLPlan.class); private String loggerName; private final ComponentSerialization serializer = new ComponentSerialization(); private L selectedClassifier; private double internalValidationErrorOfSelectedClassifier; private IComponentInstance componentInstanceOfSelectedClassifier; private final IMLPlanBuilder<L, ?> builder; private TwoPhaseHASCOBuilder<TFDNode, String> twoPhaseHASCOFactory; private OptimizingFactory<TwoPhaseSoftwareConfigurationProblem, L, HASCOSolutionCandidate<Double>, Double> optimizingFactory; private PipelineEvaluator classifierEvaluatorForSearch; private PipelineEvaluator classifierEvaluatorForSelection; private boolean buildSelectedClasifierOnGivenData = true; private final long seed; private long timestampAlgorithmStart; private boolean maintainReconstructibility = true; protected MLPlan(final IMLPlanBuilder<L, ?> builder, final ILabeledDataset<?> data) { // ML-Plan has a package visible constructor, because it should only be constructed using a builder super(builder.getAlgorithmConfig(), data); /* sanity checks */ if (builder.getSearchSpaceConfigFile() == null || !builder.getSearchSpaceConfigFile().exists()) { throw new IllegalArgumentException("The search space configuration file must be set in MLPlanBuilder, and it must be set to a file that exists!"); } Objects.requireNonNull(builder.getLearnerFactory(), "The learner factory must be set in MLPlanBuilder!"); if (builder.getRequestedInterface() == null || builder.getRequestedInterface().isEmpty()) { throw new IllegalArgumentException("No requested HASCO interface defined!"); } if (this.getConfig().getTimeout().seconds() <= this.getConfig().precautionOffset()) { throw new IllegalArgumentException("Illegal timeout configuration. The precaution offset must be strictly smaller than the specified timeout."); } /* store builder and data for main algorithm */ this.builder = builder; Objects.requireNonNull(this.getInput()); if (this.getInput().isEmpty()) { throw new IllegalArgumentException("Cannot run ML-Plan on empty dataset."); } this.seed = this.builder.getAlgorithmConfig().seed(); if (this.getInput() instanceof IReconstructible) { this.maintainReconstructibility = ReconstructionUtil.areInstructionsNonEmptyIfReconstructibilityClaimed(this.getInput()); if (!this.maintainReconstructibility) { this.logger.warn("The dataset claims to be reconstructible, but it does not carry any instructions. ML-Plan will not add reconstruction instructions."); } } else { this.maintainReconstructibility = false; } } @Override public IAlgorithmEvent nextWithException() throws AlgorithmException, InterruptedException, AlgorithmExecutionCanceledException, AlgorithmTimeoutedException { switch (this.getState()) { case CREATED: this.setTimeoutPrecautionOffset(1000); // for this routine, only consider a precaution of 1s this.logger.info("Starting an ML-Plan instance. 
Timeout precaution is {}ms", this.getTimeoutPrecautionOffset()); this.timestampAlgorithmStart = System.currentTimeMillis(); this.setDeadline(); // algorithm execution starts NOW, set deadline /* check number of CPUs assigned */ if (this.getConfig().cpus() < 1) { throw new IllegalStateException("Cannot generate search where number of CPUs is " + this.getConfig().cpus()); } /* set up exact splits */ double portionForSelection = this.getConfig().dataPortionForSelection(); Pair<ILabeledDataset<?>, ILabeledDataset<?>> split = MLPlanUtil.getDataForSearchAndSelection(this.getInput(), portionForSelection, new Random(this.getConfig().seed()), this.builder.getSearchSelectionDatasetSplitter(), this.logger); ILabeledDataset<?> dataShownToSearch = split.getX(); ILabeledDataset<?> dataShownToSelection = split.getY(); /* check that class proportions are maintained */ if (this.logger.isDebugEnabled()) { this.logger.debug("Class distribution is {}. Original class distribution was {}", DatasetUtil.getLabelCounts(dataShownToSearch), DatasetUtil.getLabelCounts(this.getInput())); } /* check that reconstructibility is preserved */ if (this.maintainReconstructibility && ((IReconstructible) dataShownToSearch).getConstructionPlan().getInstructions().isEmpty()) { throw new IllegalStateException("Reconstructibility instructions have been lost in search/selection-split!"); } /* dynamically compute blow-ups */ if (Double.isNaN(this.getConfig().expectedBlowupInSelection())) { double blowUpInSelectionPhase = (1 + portionForSelection) * 1.5; // assume super-linear runtime increase this.getConfig().setProperty(TwoPhaseHASCOConfig.K_BLOWUP_SELECTION, String.valueOf(blowUpInSelectionPhase)); this.logger.info("No expected blow-up for selection phase has been defined. Automatically configuring {}", blowUpInSelectionPhase); } if (!this.buildSelectedClasifierOnGivenData) { this.getConfig().setProperty(TwoPhaseHASCOConfig.K_BLOWUP_POSTPROCESS, String.valueOf(0)); this.logger.info("Selected classifier won't be built, so no blow-up is calculated."); } else if (Double.isNaN(this.getConfig().expectedBlowupInPostprocessing())) { double blowUpInPostprocessing = ((1.0 + portionForSelection) / 0.8) * 1.5; // the 1.5 are for a supposed super-linear runtime increase this.getConfig().setProperty(TwoPhaseHASCOConfig.K_BLOWUP_POSTPROCESS, String.valueOf(blowUpInPostprocessing)); this.logger.info("No expected blow-up for postprocessing phase has been defined. 
Automatically configuring {}", blowUpInPostprocessing); } /* setup the pipeline evaluators */ this.logger.debug("Setting up the pipeline evaluators."); Pair<PipelineEvaluator, PipelineEvaluator> evaluators; try { evaluators = MLPlanUtil.getPipelineEvaluators(this.builder.getLearnerEvaluationFactoryForSearchPhase(), this.builder.getMetricForSearchPhase(), this.builder.getLearnerEvaluationFactoryForSelectionPhase(), this.builder.getMetricForSelectionPhase(), new Random(this.seed), dataShownToSearch, dataShownToSelection, this.builder.getSafeGuardFactory(), this.builder.getLearnerFactory(), this.getConfig().getTimeoutForCandidateEvaluation()); } catch (LearnerEvaluatorConstructionFailedException e2) { throw new AlgorithmException("Could not create the evaluators.", e2); } this.classifierEvaluatorForSearch = evaluators.getX(); this.classifierEvaluatorForSelection = evaluators.getY(); this.classifierEvaluatorForSearch.registerListener(this); // events will be forwarded if (this.classifierEvaluatorForSearch.getSafeGuard() != null) { this.classifierEvaluatorForSearch.getSafeGuard().registerListener(this); } if (this.classifierEvaluatorForSelection != null) { this.classifierEvaluatorForSelection.registerListener(this); // events will be forwarded } /* communicate the parameters with which ML-Plan will run */ if (this.logger.isInfoEnabled()) { this.logger.info( "Starting ML-Plan with the following setup:\n\tDataset: {}\n\tCPUs: {}\n\tTimeout: {}s\n\tRemaining Time after initialization: {}s\n\tTimeout Precaution Offset: {}s\n\tTimeout for single candidate evaluation: {}s\n\tTimeout for node evaluation: {}s\n\tRandom Completions per node evaluation: {}\n\tPortion of data for selection phase: {}%\n\tData points used during search: {}\n\tData points used during selection: {}\n\tPipeline evaluation during search: {}\n\tPipeline evaluation during selection: {}\n\tBlow-ups are {} for selection phase and {} for post-processing phase.", this.getInput().getRelationName(), this.getConfig().cpus(), this.getTimeout().seconds(), this.getRemainingTimeToDeadline().seconds(), this.getConfig().precautionOffset(), this.getConfig().timeoutForCandidateEvaluation() / 1000, this.getConfig().timeoutForNodeEvaluation() / 1000, this.getConfig().numberOfRandomCompletions(), MathExt.round(this.getConfig().dataPortionForSelection() * 100, 2), dataShownToSearch.size(), dataShownToSelection != null ? dataShownToSelection.size() : 0, this.classifierEvaluatorForSearch.getBenchmark(), this.classifierEvaluatorForSelection != null ? 
this.classifierEvaluatorForSelection.getBenchmark() : null, this.getConfig().expectedBlowupInSelection(), this.getConfig().expectedBlowupInPostprocessing()); } /* create 2-phase software configuration problem */ this.logger.debug("Creating 2-phase software configuration problem."); TwoPhaseSoftwareConfigurationProblem problem = null; try { problem = new TwoPhaseSoftwareConfigurationProblem(this.builder.getSearchSpaceConfigFile(), this.builder.getRequestedInterface(), this.classifierEvaluatorForSearch, this.classifierEvaluatorForSelection); } catch (IOException e1) { throw new AlgorithmException("Could not create the 2-phase configuration problem with search space file \"" + this.builder.getSearchSpaceConfigFile() + "\" and required interface " + this.builder.getRequestedInterface(), e1); } /* create 2-phase HASCO */ this.logger.info("Creating the twoPhaseHASCOFactory."); OptimizingFactoryProblem<TwoPhaseSoftwareConfigurationProblem, L, Double> optimizingFactoryProblem = new OptimizingFactoryProblem<>(this.builder.getLearnerFactory(), problem); HASCOViaFDBuilder<Double, ?> hascoFactory = this.builder.getHASCOFactory(); this.twoPhaseHASCOFactory = new TwoPhaseHASCOBuilder<>(hascoFactory); this.twoPhaseHASCOFactory.setConfig(this.getConfig().copy(TwoPhaseHASCOConfig.class)); // instantiate 2-Phase-HASCO with a config COPY to not have config changes in 2-Phase-HASCO impacts on the MLPlan configuration this.optimizingFactory = new OptimizingFactory<>(optimizingFactoryProblem, this.twoPhaseHASCOFactory); if (this.loggerName != null) { this.logger.info("Setting logger of {} to {}.optimizingfactory", this.optimizingFactory.getClass().getName(), this.loggerName); this.optimizingFactory.setLoggerName(this.loggerName + ".optimizingfactory"); } final double dataPortionUsedForSelection = this.getConfig().dataPortionForSelection(); this.optimizingFactory.registerListener(new Object() { @Subscribe public void receiveEventFromFactory(final IEvent event) throws InterruptedException { if (event instanceof AlgorithmInitializedEvent || event instanceof AlgorithmFinishedEvent) { return; } if (event instanceof TwoPhaseHASCOPhaseSwitchEvent) { MLPlan.this.post(new MLPlanPhaseSwitchedEvent(MLPlan.this)); } else if (event instanceof HASCOSolutionEvent) { HASCOSolutionCandidate<Double> solution = ((HASCOSolutionEvent<Double>) event).getSolutionCandidate(); try { MLPlan.this.logger.info("Received new solution {} with score {} and evaluation time {}ms", MLPlan.this.serializer.serialize(solution.getComponentInstance()), solution.getScore(), solution.getTimeToEvaluateCandidate()); } catch (Exception e) { MLPlan.this.logger.warn("Could not print log due to exception while preparing the log message.", e); } if (dataPortionUsedForSelection == 0.0 && solution.getScore() < MLPlan.this.internalValidationErrorOfSelectedClassifier) { try { MLPlan.this.selectedClassifier = MLPlan.this.builder.getLearnerFactory().getComponentInstantiation(solution.getComponentInstance()); MLPlan.this.internalValidationErrorOfSelectedClassifier = solution.getScore(); MLPlan.this.componentInstanceOfSelectedClassifier = solution.getComponentInstance(); } catch (ComponentInstantiationFailedException e) { MLPlan.this.logger.error("Could not update selectedClassifier with newly best seen solution due to issues building the classifier from its ComponentInstance description.", e); } } try { MLPlan.this.post(new ClassifierFoundEvent(MLPlan.this, solution.getComponentInstance(), 
MLPlan.this.builder.getLearnerFactory().getComponentInstantiation(solution.getComponentInstance()), solution.getScore(), solution.getTimeToEvaluateCandidate())); } catch (ComponentInstantiationFailedException e) { MLPlan.this.logger.error("An issue occurred while preparing the description for the post of a ClassifierFoundEvent", e); } } else { MLPlan.this.post(event); } } }); Timeout remainingTimeConsideringPrecaution = new Timeout(this.getRemainingTimeToDeadline().seconds() - this.getConfig().precautionOffset(), TimeUnit.SECONDS); this.logger.info("Initializing the optimization factory with timeout {}.", remainingTimeConsideringPrecaution); this.optimizingFactory.setTimeout(remainingTimeConsideringPrecaution); this.optimizingFactory.init(); AlgorithmInitializedEvent event = this.activate(); this.logger.info("Started and activated ML-Plan."); return event; case ACTIVE: /* train the classifier returned by the optimizing factory */ long startOptimizationTime = System.currentTimeMillis(); try { this.selectedClassifier = this.optimizingFactory.call(); this.logger.info("2-Phase-HASCO has chosen classifier {}, which will now be built on the entire data given, i.e. {} data points.", this.selectedClassifier, this.getInput().size()); } catch (AlgorithmException | InterruptedException | AlgorithmExecutionCanceledException | AlgorithmTimeoutedException e) { this.terminate(); // send the termination event throw e; } this.internalValidationErrorOfSelectedClassifier = this.optimizingFactory.getPerformanceOfObject(); this.componentInstanceOfSelectedClassifier = this.optimizingFactory.getComponentInstanceOfObject(); if (this.buildSelectedClasifierOnGivenData) { long startBuildTime = System.currentTimeMillis(); try { this.selectedClassifier.fit(this.getInput()); } catch (Exception e) { throw new AlgorithmException("Training the classifier failed!", e); } long endBuildTime = System.currentTimeMillis(); this.logger.info( "Selected model has been built on entire dataset. Build time of chosen model was {}ms. Total construction time was {}ms ({}ms of that on preparation and {}ms on essential optimization). The chosen classifier is: {}", endBuildTime - startBuildTime, endBuildTime - this.timestampAlgorithmStart, startOptimizationTime - this.timestampAlgorithmStart, endBuildTime - startOptimizationTime, this.selectedClassifier); } else { this.logger.info("Selected model has not been built, since model building has been disabled. Total construction time was {}ms.", System.currentTimeMillis() - startOptimizationTime); } return this.terminate(); default: throw new IllegalStateException("Cannot do anything in state " + this.getState()); } } @Override public L call() throws AlgorithmException, InterruptedException, AlgorithmExecutionCanceledException, AlgorithmTimeoutedException { while (this.hasNext()) { this.nextWithException(); } return this.selectedClassifier; } @Override public void setLoggerName(final String name) { this.loggerName = name; super.setLoggerName(name + "._algorithm"); this.logger.info("Switching logger name to {}", name); this.logger = LoggerFactory.getLogger(name); this.logger.info("Activated ML-Plan logger {}. 
Now setting logger of twoPhaseHASCO to {}.2phasehasco", name, name); if (this.optimizingFactory != null) { this.logger.info("Setting logger of {} to {}.optimizingfactory", this.optimizingFactory.getClass().getName(), this.loggerName); this.optimizingFactory.setLoggerName(this.loggerName + ".optimizingfactory"); } else { this.logger.debug("Optimizingfactory has not been set yet, so not customizing its logger."); } this.serializer.setLoggerName(name + ".ser"); this.logger.info("Switched ML-Plan logger to {}", name); } public void setPortionOfDataForPhase2(final double portion) { this.getConfig().setProperty(IMLPlanClassifierConfig.SELECTION_PORTION, String.valueOf(portion)); } @Override public String getLoggerName() { return this.loggerName; } @Override public IMLPlanClassifierConfig getConfig() { return (IMLPlanClassifierConfig) super.getConfig(); } public void setRandomSeed(final int seed) { this.getConfig().setProperty(TwoPhaseHASCOConfig.K_RANDOM_SEED, String.valueOf(seed)); } public L getSelectedClassifier() { return this.selectedClassifier; } public IComponentInstance getComponentInstanceOfSelectedClassifier() { return this.componentInstanceOfSelectedClassifier; } public IPathSearchInput<TFDNode, String> getSearchProblemInputGenerator() { this.initializeIfNotDone(); return ((TwoPhaseHASCO<TFDNode, String>) this.optimizingFactory.getOptimizer()).getGraphSearchInput(); } public double getInternalValidationErrorOfSelectedClassifier() { return this.internalValidationErrorOfSelectedClassifier; } @Override public synchronized void cancel() { this.logger.info("Received cancel. First canceling optimizer, then invoking general shutdown."); this.optimizingFactory.cancel(); this.logger.debug("Now canceling main ML-Plan routine"); super.cancel(); assert this.isCanceled() : "Canceled-flag is not positive at the end of the cancel routine!"; this.logger.info("Completed cancellation of ML-Plan. Cancel status is {}", this.isCanceled()); } public OptimizingFactory<TwoPhaseSoftwareConfigurationProblem, L, HASCOSolutionCandidate<Double>, Double> getOptimizingFactory() { return this.optimizingFactory; } public HASCO<?, ?, ?> getHASCO() { this.initializeIfNotDone(); return ((TwoPhaseHASCO<?, ?>) this.optimizingFactory.getOptimizer()).getHasco(); } public IAlgorithm<?, ?> getSearch() { this.initializeIfNotDone(); return this.getHASCO().getSearch(); } private void initializeIfNotDone() { if (this.getState() == EAlgorithmState.CREATED) { this.next(); // initialize } } public PipelineEvaluator getClassifierEvaluatorForSearch() { return this.classifierEvaluatorForSearch; } public PipelineEvaluator getClassifierEvaluatorForSelection() { return this.classifierEvaluatorForSelection; } @Subscribe public void receiveEvent(final IEvent e) { this.post(e); } public TwoPhaseHASCOBuilder<TFDNode, String> getTwoPhaseHASCOFactory() { return this.twoPhaseHASCOFactory; } public boolean isBuildSelectedClasifierOnGivenData() { return this.buildSelectedClasifierOnGivenData; } public void setBuildSelectedClasifierOnGivenData(final boolean buildSelectedClasifierOnGivenData) { this.buildSelectedClasifierOnGivenData = buildSelectedClasifierOnGivenData; } }
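Putting the state machine above to work takes a single call(). A minimal run sketch, assuming a configured MLPlan<IClassifier> instance (e.g. from the builder sketch earlier), the IncumbentTracker listener sketched above, and registerListener as the standard IAlgorithm listener hook; RunSketch is an illustrative name:

import org.api4.java.ai.ml.classification.IClassifier;

import ai.libs.mlplan.core.MLPlan;

public class RunSketch {

	// Drives the state machine to completion: call() iterates nextWithException()
	// through the CREATED and ACTIVE states shown above and returns the selected,
	// finally fitted learner.
	public static IClassifier run(final MLPlan<IClassifier> mlplan) throws Exception {
		mlplan.registerListener(new IncumbentTracker()); // assumed IAlgorithm listener hook
		IClassifier finalModel = mlplan.call();
		System.out.println("Internal validation error of selected classifier: " + mlplan.getInternalValidationErrorOfSelectedClassifier());
		return finalModel;
	}
}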
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/core/MLPlanSimpleBuilder.java
package ai.libs.mlplan.core; import java.io.IOException; import java.util.Random; import org.api4.java.ai.ml.classification.IClassifier; import org.api4.java.ai.ml.core.dataset.splitter.IFoldSizeConfigurableRandomDatasetSplitter; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset; import org.api4.java.ai.ml.core.evaluation.supervised.loss.IDeterministicPredictionPerformanceMeasure; import ai.libs.jaicore.components.api.IComponentInstance; import ai.libs.jaicore.components.exceptions.ComponentInstantiationFailedException; import ai.libs.jaicore.ml.classification.loss.dataset.EClassificationPerformanceMeasure; import ai.libs.jaicore.ml.classification.singlelabel.learner.MajorityClassifier; import ai.libs.jaicore.ml.core.filter.FilterBasedDatasetSplitter; import ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories.LabelBasedStratifiedSamplingFactory; public class MLPlanSimpleBuilder extends AMLPlanBuilder<IClassifier, MLPlanSimpleBuilder> { public MLPlanSimpleBuilder() throws IOException { super(new IProblemType<IClassifier>() { @Override public String getSearchSpaceConfigFromFileSystem() { return ""; } @Override public String getSearchSpaceConfigFileFromResource() { return "mlplan/mlplan-simple.searchspace.json"; } @Override public String getRequestedInterface() { return "AbstractClassifier"; } @Override public String getLastHASCOMethodPriorToParameterRefinementOfBareLearner() { return null; } @Override public String getPreferredComponentListFromResource() { return null; } @Override public String getPreferredComponentListFromFileSystem() { return null; } @Override public String getLastHASCOMethodPriorToParameterRefinementOfPipeline() { return null; } @Override public IFoldSizeConfigurableRandomDatasetSplitter<ILabeledDataset<?>> getSearchSelectionDatasetSplitter() { return new FilterBasedDatasetSplitter<>(new LabelBasedStratifiedSamplingFactory<>()); } @Override public IDeterministicPredictionPerformanceMeasure<?, ?> getPerformanceMetricForSelectionPhase() { return EClassificationPerformanceMeasure.ERRORRATE; } @Override public IDeterministicPredictionPerformanceMeasure<?, ?> getPerformanceMetricForSearchPhase() { return EClassificationPerformanceMeasure.ERRORRATE; } @Override public String getName() { return "SimpleProblemType"; } @Override public ILearnerFactory<IClassifier> getLearnerFactory() { return new ILearnerFactory<IClassifier>() { @Override public IClassifier getComponentInstantiation(final IComponentInstance groundComponent) throws ComponentInstantiationFailedException { return new MajorityClassifier(); } }; } @Override public PipelineValidityCheckingNodeEvaluator getValidityCheckingNodeEvaluator() { return null; } }); /* configure dataset splitter */ this.withDatasetSplitterForSearchSelectionSplit(new FilterBasedDatasetSplitter<>(new LabelBasedStratifiedSamplingFactory<>(), .9, new Random(0))); this.withMCCVBasedCandidateEvaluationInSearchPhase(3, .7); this.withMCCVBasedCandidateEvaluationInSelectionPhase(3, .7); } @Override public MLPlanSimpleBuilder getSelf() { return this; } }
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/core/MLPlanUtil.java
package ai.libs.mlplan.core; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Objects; import java.util.Random; import java.util.concurrent.TimeUnit; import java.util.function.Predicate; import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.IPathEvaluator; import org.api4.java.ai.ml.core.IDataConfigurable; import org.api4.java.ai.ml.core.dataset.splitter.IFoldSizeConfigurableRandomDatasetSplitter; import org.api4.java.ai.ml.core.dataset.splitter.SplitFailedException; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance; import org.api4.java.ai.ml.core.evaluation.IPredictionPerformanceMetricConfigurable; import org.api4.java.ai.ml.core.evaluation.ISupervisedLearnerEvaluator; import org.api4.java.ai.ml.core.evaluation.supervised.loss.IDeterministicPredictionPerformanceMeasure; import org.api4.java.ai.ml.core.learner.ISupervisedLearner; import org.api4.java.algorithm.Timeout; import org.api4.java.algorithm.exceptions.AlgorithmException; import org.api4.java.common.control.ILoggingCustomizable; import org.api4.java.common.control.IRandomConfigurable; import org.slf4j.Logger; import ai.libs.hasco.builder.HASCOBuilder; import ai.libs.hasco.builder.forwarddecomposition.HASCOViaFDAndBestFirstWithRandomCompletionsBuilder; import ai.libs.jaicore.basic.MathExt; import ai.libs.jaicore.basic.sets.Pair; import ai.libs.jaicore.components.model.RefinementConfiguredSoftwareConfigurationProblem; import ai.libs.jaicore.ml.core.evaluation.evaluator.factory.ISupervisedLearnerEvaluatorFactory; import ai.libs.jaicore.ml.core.evaluation.evaluator.factory.LearnerEvaluatorConstructionFailedException; import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode; import ai.libs.jaicore.search.algorithms.standard.bestfirst.nodeevaluation.AlternativeNodeEvaluator; import ai.libs.mlplan.multiclass.IMLPlanClassifierConfig; import ai.libs.mlplan.safeguard.IEvaluationSafeGuard; import ai.libs.mlplan.safeguard.IEvaluationSafeGuardFactory; /** * The methods in this util are mainly used in the MLPlan algorithm itself but outsourced in order to improve readability and testability. 
* * @author Felix Mohr * */ abstract class MLPlanUtil { private MLPlanUtil() { /* avoid instantiation */ } public static Pair<ILabeledDataset<?>, ILabeledDataset<?>> getDataForSearchAndSelection(final ILabeledDataset<?> dataset, final double dataPortionUsedForSelection, final Random random, final IFoldSizeConfigurableRandomDatasetSplitter<ILabeledDataset<?>> splitter, final Logger logger) throws InterruptedException, AlgorithmException { ILabeledDataset<?> dataShownToSearch; ILabeledDataset<?> dataShownToSelection; if (dataPortionUsedForSelection > 0) { try { if (splitter == null) { throw new IllegalArgumentException("The builder does not specify a dataset splitter for the separation between search and selection phase data."); } logger.debug("Splitting given {} data points into search data ({}%) and selection data ({}%) with splitter {}.", dataset.size(), MathExt.round((1 - dataPortionUsedForSelection) * 100, 2), MathExt.round(dataPortionUsedForSelection * 100, 2), splitter.getClass().getName()); if (splitter instanceof ILoggingCustomizable) { ((ILoggingCustomizable) splitter).setLoggerName(logger.getName() + ".searchselectsplitter"); } List<ILabeledDataset<?>> split = splitter.split(dataset, random, dataPortionUsedForSelection); final int expectedSearchSize = (int) Math.round(dataset.size() * (1 - dataPortionUsedForSelection)); // note: the splitter returns the selection portion at index 0 and the search portion at index 1 final int expectedSelectionSize = dataset.size() - expectedSearchSize; if (Math.abs(expectedSearchSize - split.get(1).size()) > 1 || Math.abs(expectedSelectionSize - split.get(0).size()) > 1) { throw new IllegalStateException("Invalid split produced by " + splitter.getClass().getName() + "! Split sizes are " + split.get(1).size() + "/" + split.get(0).size() + " but expected sizes were " + expectedSearchSize + "/" + expectedSelectionSize); } dataShownToSearch = split.get(1); // the search portion is at index 1 of the split dataShownToSelection = dataset; // the selection phase is given the full dataset logger.debug("Search/Selection split completed. Using {} data points in search and {} in selection.", dataShownToSearch.size(), dataShownToSelection.size()); } catch (SplitFailedException e) { throw new AlgorithmException("Error in ML-Plan execution.", e); } } else { dataShownToSearch = dataset; dataShownToSelection = null; logger.debug("Selection phase de-activated. Not splitting the data and giving everything to the search."); } if (dataShownToSearch.isEmpty()) { throw new IllegalStateException("Cannot search on no data."); } if (dataShownToSelection != null && dataShownToSelection.size() < dataShownToSearch.size()) { throw new IllegalStateException("The search data (" + dataShownToSearch.size() + " data points) is larger than the selection data (" + dataShownToSelection.size() + " data points)!"); } return new Pair<>(dataShownToSearch, dataShownToSelection); } public static Pair<PipelineEvaluator, PipelineEvaluator> getPipelineEvaluators(final ISupervisedLearnerEvaluatorFactory<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> evaluatorFactoryForSearch, final IDeterministicPredictionPerformanceMeasure<?, ?> metricForSearch, final ISupervisedLearnerEvaluatorFactory<ILabeledInstance, ILabeledDataset<?
extends ILabeledInstance>> evaluatorFactoryForSelection, final IDeterministicPredictionPerformanceMeasure<?, ?> metricForSelection, final Random random, final ILabeledDataset<?> dataShownToSearch, final ILabeledDataset<?> dataShownToSelection, final IEvaluationSafeGuardFactory safeGuardFactory, final ILearnerFactory<? extends ISupervisedLearner<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>>> learnerFactory, final Timeout timeoutForCandidateEvaluation) throws InterruptedException, AlgorithmException, LearnerEvaluatorConstructionFailedException { /* set random source and data for the evaluator factories */ if (evaluatorFactoryForSearch instanceof IPredictionPerformanceMetricConfigurable) { ((IPredictionPerformanceMetricConfigurable) evaluatorFactoryForSearch).setMeasure(metricForSearch); } if (evaluatorFactoryForSearch instanceof IRandomConfigurable) { ((IRandomConfigurable) evaluatorFactoryForSearch).setRandom(random); } if (evaluatorFactoryForSearch instanceof IDataConfigurable) { ((IDataConfigurable) evaluatorFactoryForSearch).setData(dataShownToSearch); } if (evaluatorFactoryForSelection instanceof IPredictionPerformanceMetricConfigurable) { ((IPredictionPerformanceMetricConfigurable) evaluatorFactoryForSelection).setMeasure(metricForSelection); } if (evaluatorFactoryForSelection instanceof IRandomConfigurable) { ((IRandomConfigurable) evaluatorFactoryForSelection).setRandom(random); } if (evaluatorFactoryForSelection instanceof IDataConfigurable && dataShownToSelection != null) { ((IDataConfigurable) evaluatorFactoryForSelection).setData(dataShownToSelection); } /* create pipeline evaluator for search phase */ ISupervisedLearnerEvaluator<ILabeledInstance, ILabeledDataset<?>> searchEvaluator = evaluatorFactoryForSearch.getLearnerEvaluator(); PipelineEvaluator classifierEvaluatorForSearch = new PipelineEvaluator(learnerFactory, searchEvaluator, timeoutForCandidateEvaluation); if (safeGuardFactory != null) { safeGuardFactory.withEvaluator(searchEvaluator); try { IEvaluationSafeGuard safeGuard = safeGuardFactory.build(); classifierEvaluatorForSearch.setSafeGuard(safeGuard); } catch (InterruptedException e) { throw e; } catch (Exception e) { throw new AlgorithmException("Could not build safe guard.", e); } } /* create pipeline evaluator for selection phase */ PipelineEvaluator classifierEvaluatorForSelection = dataShownToSelection != null ? 
new PipelineEvaluator(learnerFactory, evaluatorFactoryForSelection.getLearnerEvaluator(), timeoutForCandidateEvaluation) : null; return new Pair<>(classifierEvaluatorForSearch, classifierEvaluatorForSelection); } public static HASCOViaFDAndBestFirstWithRandomCompletionsBuilder getHASCOBuilder(final IMLPlanClassifierConfig algorithmConfig, final ILabeledDataset<?> dataset, final File searchSpaceFile, final String requestedHASCOInterface, final Predicate<TFDNode> priorizingPredicate, final List<IPathEvaluator<TFDNode, String, Double>> preferredNodeEvaluators, final PipelineValidityCheckingNodeEvaluator pipelineValidityCheckingNodeEvaluator, final String nameOfMethod1, final String nameOfMethod2) { /* compile software composition problem and create the builder */ RefinementConfiguredSoftwareConfigurationProblem<Double> problem; try { problem = new RefinementConfiguredSoftwareConfigurationProblem<>(searchSpaceFile, requestedHASCOInterface, null); } catch (IOException e) { throw new IllegalArgumentException("Invalid configuration file " + searchSpaceFile, e); } HASCOViaFDAndBestFirstWithRandomCompletionsBuilder hascoBuilder = HASCOBuilder.get(problem).withBestFirst().withRandomCompletions(); /* now configure the chain of preferred node evaluators (taking into account that the evaluators checking validity and preferred components are the most important ones) */ List<IPathEvaluator<TFDNode, String, Double>> neChain = new ArrayList<>(); if (pipelineValidityCheckingNodeEvaluator != null) { pipelineValidityCheckingNodeEvaluator.setComponents(problem.getComponents()); pipelineValidityCheckingNodeEvaluator.setData(dataset); neChain.add(pipelineValidityCheckingNodeEvaluator); } if (algorithmConfig.preferredComponents() != null && !algorithmConfig.preferredComponents().isEmpty()) { Objects.requireNonNull(nameOfMethod1, "First HASCO method must not be null!"); Objects.requireNonNull(nameOfMethod2, "Second HASCO method must not be null!"); neChain.add(new PreferenceBasedNodeEvaluator(problem.getComponents(), algorithmConfig.preferredComponents(), nameOfMethod1, nameOfMethod2)); } neChain.addAll(preferredNodeEvaluators); if (!neChain.isEmpty()) { IPathEvaluator<TFDNode, String, Double> preferredNodeEvaluator = neChain.remove(0); for (IPathEvaluator<TFDNode, String, Double> ne : neChain) { preferredNodeEvaluator = new AlternativeNodeEvaluator<>(preferredNodeEvaluator, ne); } hascoBuilder.withPreferredNodeEvaluator(preferredNodeEvaluator); } hascoBuilder.withNumSamples(algorithmConfig.numberOfRandomCompletions()); hascoBuilder.withSeed(algorithmConfig.seed()); hascoBuilder.withTimeoutForNode(new Timeout(algorithmConfig.timeoutForNodeEvaluation(), TimeUnit.MILLISECONDS)); hascoBuilder.withTimeoutForSingleEvaluation(new Timeout(algorithmConfig.timeoutForCandidateEvaluation(), TimeUnit.MILLISECONDS)); hascoBuilder.withPriorizingPredicate(priorizingPredicate); return hascoBuilder; } }
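The loop at the end of getHASCOBuilder left-folds the evaluator list into nested AlternativeNodeEvaluators, so earlier entries (validity check, component preferences) dominate later ones. The following sketch illustrates the assumed fallback semantics, namely that an AlternativeNodeEvaluator consults its first evaluator and only falls back to the second when the first returns null; the lambda-based evaluators are purely illustrative.

// Illustrative sketch of the fold: with evaluators [e1, e2, e3] the loop builds
// new AlternativeNodeEvaluator<>(new AlternativeNodeEvaluator<>(e1, e2), e3),
// i.e. e1 decides first and e3 is only a last resort.
IPathEvaluator<TFDNode, String, Double> e1 = p -> null;  // e.g. a validity check: no objection
IPathEvaluator<TFDNode, String, Double> e2 = p -> -1.0;  // e.g. a preference score
IPathEvaluator<TFDNode, String, Double> chain = new AlternativeNodeEvaluator<>(e1, e2);
// chain.evaluate(path) is expected to yield -1.0 here, since e1 abstains with null.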
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/core/NoModelBuiltException.java
package ai.libs.mlplan.core; public class NoModelBuiltException extends Exception { /** * */ private static final long serialVersionUID = 5348375646068721111L; public NoModelBuiltException() { super(); } public NoModelBuiltException(final String message) { super(message); } public NoModelBuiltException(final Throwable cause) { super(cause); } public NoModelBuiltException(final String message, final Throwable cause) { super(message, cause); } public NoModelBuiltException(final String message, final Throwable cause, final boolean enableSuppression, final boolean writableStackTrace) { super(message, cause, enableSuppression, writableStackTrace); } }
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/core/PipelineEvaluator.java
package ai.libs.mlplan.core; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance; import org.api4.java.ai.ml.core.evaluation.ISupervisedLearnerEvaluator; import org.api4.java.ai.ml.core.learner.ISupervisedLearner; import org.api4.java.algorithm.Timeout; import org.api4.java.common.attributedobjects.ObjectEvaluationFailedException; import org.api4.java.common.control.ILoggingCustomizable; import org.api4.java.common.event.IEvent; import org.api4.java.common.event.IEventEmitter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.eventbus.EventBus; import com.google.common.eventbus.Subscribe; import ai.libs.jaicore.components.api.IComponentInstance; import ai.libs.jaicore.components.exceptions.ComponentInstantiationFailedException; import ai.libs.jaicore.components.model.ComponentInstance; import ai.libs.jaicore.components.model.ComponentInstanceUtil; import ai.libs.jaicore.timing.TimedObjectEvaluator; import ai.libs.mlplan.core.events.SupervisedLearnerCreatedEvent; import ai.libs.mlplan.core.events.TimeTrackingLearnerEvaluationEvent; import ai.libs.mlplan.safeguard.AlwaysEvaluateSafeGuard; import ai.libs.mlplan.safeguard.EvaluationSafeGuardException; import ai.libs.mlplan.safeguard.EvaluationSafeGuardFiredEvent; import ai.libs.mlplan.safeguard.IEvaluationSafeGuard; /** * Evaluator used in the search phase of mlplan. * * @author fmohr */ public class PipelineEvaluator extends TimedObjectEvaluator<IComponentInstance, Double> implements ILoggingCustomizable { private static final String DEFAULT_PIPELINE_EVALUATOR_ID = "PipelineEvaluator"; private String pipelineEvaluatorID = DEFAULT_PIPELINE_EVALUATOR_ID; private Logger logger = LoggerFactory.getLogger(PipelineEvaluator.class); private final EventBus eventBus = new EventBus(); private final ILearnerFactory<? extends ISupervisedLearner<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>>> learnerFactory; private final ISupervisedLearnerEvaluator<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> benchmark; private final Timeout timeoutForEvaluation; private IEvaluationSafeGuard safeGuard; public PipelineEvaluator(final ILearnerFactory<? extends ISupervisedLearner<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>>> learnerFactory, final ISupervisedLearnerEvaluator<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> benchmark, final Timeout timeoutForEvaluation) { this(learnerFactory, benchmark, timeoutForEvaluation, new AlwaysEvaluateSafeGuard()); } @SuppressWarnings("unchecked") public PipelineEvaluator(final ILearnerFactory<? extends ISupervisedLearner<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>>> learnerFactory, final ISupervisedLearnerEvaluator<ILabeledInstance, ILabeledDataset<? 
extends ILabeledInstance>> benchmark, final Timeout timeoutForEvaluation, final IEvaluationSafeGuard safeGuard) { super(); this.learnerFactory = learnerFactory; this.benchmark = benchmark; if (benchmark instanceof IEventEmitter) { ((IEventEmitter<PipelineEvaluator>) benchmark).registerListener(this); } this.timeoutForEvaluation = timeoutForEvaluation; this.safeGuard = safeGuard; } @Override public String getLoggerName() { return this.logger.getName(); } @Override public void setLoggerName(final String name) { this.logger.info("Switching logger name from {} to {}", this.logger.getName(), name); this.logger = LoggerFactory.getLogger(name); if (this.benchmark instanceof ILoggingCustomizable) { this.logger.info("Setting logger name of actual benchmark {} to {}.benchmark", this.benchmark.getClass().getName(), name); ((ILoggingCustomizable) this.benchmark).setLoggerName(name + ".benchmark"); } else { this.logger.info("Benchmark {} does not implement ILoggingCustomizable, not customizing its logger.", this.benchmark.getClass().getName()); } } @Override public Double evaluateSupervised(final IComponentInstance c) throws InterruptedException, ObjectEvaluationFailedException { this.logger.debug("Received request to evaluate component instance {}", c); this.logger.debug("Query evaluation safe guard whether to evaluate this component instance for the given timeout {}.", this.timeoutForEvaluation); try { if (!this.safeGuard.predictWillAdhereToTimeout(c, this.timeoutForEvaluation)) { this.eventBus.post(new EvaluationSafeGuardFiredEvent(c)); throw new EvaluationSafeGuardException("Evaluation safe guard prevents evaluation of component instance.", c); } } catch (EvaluationSafeGuardException | InterruptedException e) { throw e; } catch (Exception e) { this.logger.error("Could not use evaluation safe guard for component instance of {}. Continue with business as usual. Here is the stacktrace:", ComponentInstanceUtil.toComponentNameString(c), e); } try { this.logger.debug("Instantiate learner from component instance."); ISupervisedLearner<ILabeledInstance, ILabeledDataset<? 
extends ILabeledInstance>> learner = this.learnerFactory.getComponentInstantiation(c); this.eventBus.post(new SupervisedLearnerCreatedEvent(c, learner)); // inform listeners about the creation of the classifier ITimeTrackingLearner trackableLearner = new TimeTrackingLearnerWrapper(c, learner); if (c instanceof ComponentInstance) { trackableLearner.setPredictedInductionTime(((ComponentInstance) c).getAnnotation(IEvaluationSafeGuard.ANNOTATION_PREDICTED_INDUCTION_TIME)); trackableLearner.setPredictedInferenceTime(((ComponentInstance) c).getAnnotation(IEvaluationSafeGuard.ANNOTATION_PREDICTED_INFERENCE_TIME)); } if (this.logger.isDebugEnabled()) { this.logger.debug("Starting benchmark {} for classifier {}", this.benchmark, learner); } Double score = this.benchmark.evaluate(trackableLearner); trackableLearner.setScore(score); if (this.logger.isInfoEnabled()) { this.logger.info("Obtained score {} for classifier {}", score, learner); } this.eventBus.post(new TimeTrackingLearnerEvaluationEvent(trackableLearner)); this.safeGuard.updateWithActualInformation(c, trackableLearner); return score; } catch (ComponentInstantiationFailedException e) { throw new ObjectEvaluationFailedException("Evaluation of the composition failed as the component instance could not be instantiated.", e); } } @Override public Timeout getTimeout(final IComponentInstance item) { return this.timeoutForEvaluation; } @Override public String getMessage(final IComponentInstance item) { return "Pipeline evaluation phase"; } public ISupervisedLearnerEvaluator<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> getBenchmark() { return this.benchmark; } public void setSafeGuard(final IEvaluationSafeGuard safeGuard) { if (safeGuard != null) { this.safeGuard = safeGuard; } else { this.safeGuard = new AlwaysEvaluateSafeGuard(); } } public void setPipelineEvaluatorID(final String pipelineEvaluatorID) { this.pipelineEvaluatorID = pipelineEvaluatorID; } public String getPipelineEvaluatorID() { return this.pipelineEvaluatorID; } public IEvaluationSafeGuard getSafeGuard() { return this.safeGuard; } /** * Registers a listener on the internal event bus. A coupling event informs the listener about which ComponentInstance has been used to create a classifier. * * @param listener */ public void registerListener(final Object listener) { this.eventBus.register(listener); } /** * Forwards every incoming event e to the registered listeners. * * @param e */ @Subscribe public void receiveEvent(final IEvent e) { this.eventBus.post(e); } }
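A sketch of how a client can hook into the PipelineEvaluator's event bus. The listener class and its handling logic are illustrative; only the Guava @Subscribe mechanism and the two event types from this module are given.

import com.google.common.eventbus.Subscribe;
import ai.libs.mlplan.core.events.SupervisedLearnerCreatedEvent;
import ai.libs.mlplan.safeguard.EvaluationSafeGuardFiredEvent;

public class PipelineEvaluationLogger {

    @Subscribe
    public void onLearnerCreated(final SupervisedLearnerCreatedEvent e) {
        // fired right after the learner factory instantiated a candidate
        System.out.println("Learner created for component instance " + e.getInstance());
    }

    @Subscribe
    public void onSafeGuardFired(final EvaluationSafeGuardFiredEvent e) {
        // fired when the safe guard vetoed an evaluation
        System.out.println("Safe guard prevented evaluation of " + e.getComponentInstance());
    }
}
// usage: pipelineEvaluator.registerListener(new PipelineEvaluationLogger());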
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/core/PipelineValidityCheckingNodeEvaluator.java
package ai.libs.mlplan.core; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Objects; import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.IPathEvaluator; import org.api4.java.ai.ml.core.dataset.schema.attribute.IAttribute; import org.api4.java.ai.ml.core.dataset.schema.attribute.ICategoricalAttribute; import org.api4.java.ai.ml.core.dataset.schema.attribute.INumericAttribute; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance; import ai.libs.jaicore.components.api.IComponent; import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode; public abstract class PipelineValidityCheckingNodeEvaluator implements IPathEvaluator<TFDNode, String, Double> { private ILabeledDataset<?> data; private List<IComponent> components; /* the predicates of the dataset */ protected boolean propertiesDetermined; protected boolean binaryClass; protected boolean multiClass; protected boolean regression; protected boolean multiValuedNominalAttributes; protected boolean containsNegativeValues; public PipelineValidityCheckingNodeEvaluator() { } public PipelineValidityCheckingNodeEvaluator(final Collection<? extends IComponent> components, final ILabeledDataset<?> data) { this.setComponents(components); this.setData(data); this.extractDatasetProperties(); } protected synchronized void extractDatasetProperties() { if (!this.propertiesDetermined) { if (this.getComponents() == null) { throw new IllegalStateException("Components not defined!"); } /* compute binary class predicate */ this.binaryClass = this.getData().getLabelAttribute() instanceof ICategoricalAttribute && ((ICategoricalAttribute) this.getData().getLabelAttribute()).getNumberOfCategories() == 2; this.multiClass = this.getData().getLabelAttribute() instanceof ICategoricalAttribute && ((ICategoricalAttribute) this.getData().getLabelAttribute()).getNumberOfCategories() > 2; this.regression = this.getData().getLabelAttribute() instanceof INumericAttribute; /* determine whether the dataset is multi-valued nominal */ this.multiValuedNominalAttributes = false; for (IAttribute att : this.getData().getListOfAttributes()) { if (att instanceof ICategoricalAttribute && ((ICategoricalAttribute) att).getNumberOfCategories() > 2) { this.multiValuedNominalAttributes = true; break; } } /* determine whether dataset contains negative attribute values */ this.containsNegativeValues = false; for (ILabeledInstance i : this.getData()) { this.containsNegativeValues = this.containsNegativeValues || Arrays.stream(i.getPoint()).anyMatch(x -> x < 0); if (this.containsNegativeValues) { break; } } this.propertiesDetermined = true; } } public void setData(final ILabeledDataset<?> data) { Objects.requireNonNull(data); this.data = data; } public void setComponents(final Collection<? extends IComponent> components) { Objects.requireNonNull(components); this.components = new ArrayList<>(components); } public ILabeledDataset<?> getData() { return this.data; } public Collection<IComponent> getComponents() { return this.components; } }
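PipelineValidityCheckingNodeEvaluator is abstract and leaves the actual evaluate(...) logic to subclasses. Below is a hypothetical subclass sketch that uses the precomputed dataset flags to veto pipelines on data with negative attribute values; the component-recognition helper and the message-only PathEvaluationException constructor are assumptions for illustration.

import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.PathEvaluationException;
import org.api4.java.datastructure.graph.ILabeledPath;
import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode;
import ai.libs.mlplan.core.PipelineValidityCheckingNodeEvaluator;

public class NegativeValueValidityChecker extends PipelineValidityCheckingNodeEvaluator {

    @Override
    public Double evaluate(final ILabeledPath<TFDNode, String> path) throws PathEvaluationException, InterruptedException {
        this.extractDatasetProperties(); // lazily derives binaryClass, containsNegativeValues, etc.
        if (this.containsNegativeValues && this.selectsLearnerRequiringNonNegativeData(path)) {
            // signal an invalid pipeline; the exception constructor is assumed here
            throw new PathEvaluationException("Chosen learner cannot handle negative attribute values.");
        }
        return null; // no objection; defer to the next evaluator in the chain
    }

    private boolean selectsLearnerRequiringNonNegativeData(final ILabeledPath<TFDNode, String> path) {
        // placeholder: a real implementation would reconstruct the component instance
        // from path.getHead().getState() (cf. PreferenceBasedNodeEvaluator) and inspect it
        return false;
    }
}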
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/core/PreferenceBasedNodeEvaluator.java
package ai.libs.mlplan.core; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedList; import java.util.List; import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.IPathEvaluator; import org.api4.java.common.control.ILoggingCustomizable; import org.api4.java.datastructure.graph.ILabeledPath; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ai.libs.hasco.core.HASCOUtil; import ai.libs.jaicore.components.api.IComponent; import ai.libs.jaicore.components.model.ComponentInstance; import ai.libs.jaicore.components.serialization.ComponentSerialization; import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode; import ai.libs.jaicore.search.model.travesaltree.BackPointerPath; public class PreferenceBasedNodeEvaluator implements IPathEvaluator<TFDNode, String, Double>, ILoggingCustomizable { public static final String COMPNAME_LEARNER = "learner"; private static final double EXPAND_NODE_SCORE = (-1) * Double.MAX_VALUE; private final Collection<IComponent> components; private final List<String> orderingOfComponents; private final String nameOfMethodToResolveBaseLearner; private final String nameOfMethodToResolveLearnerInPipeline; private Logger logger = LoggerFactory.getLogger(PreferenceBasedNodeEvaluator.class); private boolean sentLogMessageForHavingEnteredSecondSubPhase = false; public PreferenceBasedNodeEvaluator(final Collection<? extends IComponent> components, final List<String> orderingOfComponents, final String nameOfMethodToResolveBareLearner, final String nameOfMethodToResolveLearnerInPipeline) { super(); this.components = new ArrayList<>(components); this.orderingOfComponents = orderingOfComponents; this.nameOfMethodToResolveBaseLearner = nameOfMethodToResolveBareLearner; this.nameOfMethodToResolveLearnerInPipeline = nameOfMethodToResolveLearnerInPipeline; } @Override public Double evaluate(final ILabeledPath<TFDNode, String> n) { try { this.logger.info("Received request for node evaluation."); List<String> appliedMethods = new LinkedList<>(); for (TFDNode x : n.getNodes()) { if (x.getAppliedMethodInstance() != null) { appliedMethods.add(x.getAppliedMethodInstance().getMethod().getName()); } } this.logger.debug("Determined {} applied methods: {}", appliedMethods.size(), appliedMethods); /* get partial component */ ComponentInstance instance = HASCOUtil.getSolutionCompositionFromState(this.components, n.getHead().getState(), false); boolean isPipeline = appliedMethods.stream().anyMatch(x -> x.toLowerCase().contains("pipeline")); String classifierName = null; /* first check whether any decision about an instance is recognizable. If not, return the constant expansion score */ if (instance == null) { this.logger.info("No decision recognizable *in state* yet, returning quasi-null score {}", EXPAND_NODE_SCORE); return EXPAND_NODE_SCORE; } /* now check whether the classifier has already been chosen. If the classifier has NOT been chosen, continue with BFS */ String nameOfLastAppliedMethod = appliedMethods.get(appliedMethods.size() - 1); String compactStringOfCI = new ComponentSerialization().serialize(instance).toString(); this.logger.debug("The associated component instance is {}. Constitutes a pipeline? {}. Name of last applied method: {}", compactStringOfCI, isPipeline ?
"yes" : "no", nameOfLastAppliedMethod); Double score = EXPAND_NODE_SCORE; if (isPipeline) { if (instance.getSatisfactionOfRequiredInterfaces().containsKey(COMPNAME_LEARNER) && !instance.getSatisfactionOfRequiredInterface(COMPNAME_LEARNER).isEmpty()) { classifierName = instance.getSatisfactionOfRequiredInterface(COMPNAME_LEARNER).iterator().next().getComponent().getName(); } else { this.logger.debug("The exact decision about the learner used in the pipeline is not yet recognizable in the state. Returning {}.", EXPAND_NODE_SCORE); return EXPAND_NODE_SCORE; } } else { classifierName = instance.getComponent().getName(); } this.logger.debug("Identified classifier {}.", classifierName); /* check whether this is the last step before stepping to random completions */ boolean lastMethodBeforeSteppingToRandomCompletions = nameOfLastAppliedMethod.startsWith(this.nameOfMethodToResolveLearnerInPipeline) || nameOfLastAppliedMethod.startsWith(this.nameOfMethodToResolveBaseLearner); if (lastMethodBeforeSteppingToRandomCompletions) { if (isPipeline) { score /= Math.pow(10.0, this.orderingOfComponents.size() + 1.0); } double exp; if (this.orderingOfComponents.contains(classifierName)) { exp = this.orderingOfComponents.indexOf(classifierName); this.logger.debug("This is a preferred component with preference index {}.", exp); } else { exp = this.orderingOfComponents.size(); this.logger.debug("This is not a preferred component. Preference index will be {}.", exp); } score /= Math.pow(10.0, exp + 1.0); } else { score = null; if (!this.sentLogMessageForHavingEnteredSecondSubPhase) { double scoreOfParent; if ((scoreOfParent = ((BackPointerPath<TFDNode, String, Double>) n.getPathToParentOfHead()).getScore()) > (EXPAND_NODE_SCORE / 1E100)) { this.sentLogMessageForHavingEnteredSecondSubPhase = true; this.logger.info("Entering phase 1b! Breadth first search ends here, because the search is asking for the f-value of a node whose parent has been truly evaluated with an f-value of {}", scoreOfParent); } } } this.logger.info("Returning score {} for instance {}", score, compactStringOfCI); return score; } catch (Exception e) { this.logger.error("Error in preference-based node evaluation.", e); throw e; } } @Override public String toString() { return "PreferenceBasedNodeEvaluator [ORDERING_OF_CLASSIFIERS=" + this.orderingOfComponents + "]"; } @Override public String getLoggerName() { return this.logger.getName(); } @Override public void setLoggerName(final String name) { this.logger = LoggerFactory.getLogger(name); } }
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/core/TasksAlreadyResolvedPathEvaluator.java
package ai.libs.mlplan.core; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.stream.Collectors; import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.IPathEvaluator; import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.PathEvaluationException; import org.api4.java.datastructure.graph.ILabeledPath; import ai.libs.jaicore.logic.fol.structure.Literal; import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode; public class TasksAlreadyResolvedPathEvaluator implements IPathEvaluator<TFDNode, String, Double> { private Set<String> prefixesWhichHaveToBeResolvedBeforeGoingToNextPhase = new HashSet<>(); public TasksAlreadyResolvedPathEvaluator(final List<String> prefixesWhichHaveToBeResolvedBeforeGoingToNextPhase) { this.prefixesWhichHaveToBeResolvedBeforeGoingToNextPhase = new HashSet<>(prefixesWhichHaveToBeResolvedBeforeGoingToNextPhase); } @Override public Double evaluate(final ILabeledPath<TFDNode, String> path) throws PathEvaluationException, InterruptedException { Set<String> openTasks = path.getHead().getRemainingTasks().stream().map(Literal::getPropertyName).collect(Collectors.toSet()); for (String prefix : this.prefixesWhichHaveToBeResolvedBeforeGoingToNextPhase) { if (openTasks.stream().anyMatch(t -> t.startsWith("1_tResolve" + prefix))) { return 0d; } } return null; } }
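A brief usage sketch: plugged in as a preferred node evaluator, this path evaluator pins the score to 0.0 while any task whose name starts with "1_tResolve<prefix>" is still open, and abstains with null afterwards, handing control to the next evaluator. The prefix value and the builder variable are illustrative.

import java.util.Arrays;
// keep expanding until all learner-resolution tasks are closed (prefix is illustrative)
IPathEvaluator<TFDNode, String, Double> gate =
        new TasksAlreadyResolvedPathEvaluator(Arrays.asList("AbstractClassifier"));
hascoBuilder.withPreferredNodeEvaluator(gate); // builder method as used in MLPlanUtil above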
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/core/TimeTrackingLearnerWrapper.java
package ai.libs.mlplan.core; import java.util.ArrayList; import java.util.List; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance; import org.api4.java.ai.ml.core.evaluation.IPrediction; import org.api4.java.ai.ml.core.evaluation.IPredictionBatch; import org.api4.java.ai.ml.core.exception.PredictionException; import org.api4.java.ai.ml.core.exception.TrainingException; import org.api4.java.ai.ml.core.learner.ISupervisedLearner; import org.api4.java.common.control.ILoggingCustomizable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ai.libs.jaicore.components.api.IComponentInstance; import ai.libs.jaicore.ml.core.learner.ASupervisedLearner; public class TimeTrackingLearnerWrapper extends ASupervisedLearner<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>, IPrediction, IPredictionBatch> implements ITimeTrackingLearner, ILoggingCustomizable { private Logger logger = LoggerFactory.getLogger(TimeTrackingLearnerWrapper.class); private final ISupervisedLearner<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> wrappedSLClassifier; private IComponentInstance ci; private List<Long> fitTimes; private List<Long> batchPredictTimes; private List<Long> perInstancePredictionTimes; private Double predictedInductionTime = null; private Double predictedInferenceTime = null; private Double score; public TimeTrackingLearnerWrapper(final IComponentInstance ci, final ISupervisedLearner<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> wrappedLearner) { this.ci = ci; this.wrappedSLClassifier = wrappedLearner; this.fitTimes = new ArrayList<>(); this.batchPredictTimes = new ArrayList<>(); this.perInstancePredictionTimes = new ArrayList<>(); } @Override public void fit(final ILabeledDataset<? extends ILabeledInstance> dTrain) throws TrainingException, InterruptedException { TimeTracker tracker = new TimeTracker(); this.wrappedSLClassifier.fit(dTrain); this.fitTimes.add(tracker.stop()); } @Override public IPrediction predict(final ILabeledInstance xTest) throws PredictionException, InterruptedException { TimeTracker tracker = new TimeTracker(); IPrediction prediction = this.wrappedSLClassifier.predict(xTest); this.perInstancePredictionTimes.add(tracker.stop()); return prediction; } @Override public IPredictionBatch predict(final ILabeledInstance[] dTest) throws PredictionException, InterruptedException { TimeTracker tracker = new TimeTracker(); IPredictionBatch prediction = this.wrappedSLClassifier.predict(dTest); long time = tracker.stop(); this.batchPredictTimes.add(time); this.perInstancePredictionTimes.add(Math.round((double) time / dTest.length)); return prediction; } @Override public IPredictionBatch predict(final ILabeledDataset<? 
extends ILabeledInstance> dTest) throws PredictionException, InterruptedException { TimeTracker tracker = new TimeTracker(); IPredictionBatch prediction = this.wrappedSLClassifier.predict(dTest); long time = tracker.stop(); this.batchPredictTimes.add(time); this.perInstancePredictionTimes.add(Math.round((double) time / dTest.size())); return prediction; } @Override public List<Long> getFitTimes() { return this.fitTimes; } @Override public List<Long> getBatchPredictionTimesInMS() { return this.batchPredictTimes; } @Override public List<Long> getInstancePredictionTimesInMS() { return this.perInstancePredictionTimes; } @Override public IComponentInstance getComponentInstance() { return this.ci; } class TimeTracker { private final long startTime; private TimeTracker() { this.startTime = System.currentTimeMillis(); } public long stop() { return System.currentTimeMillis() - this.startTime; } } @Override public void setPredictedInductionTime(final String inductionTime) { try { this.predictedInductionTime = Double.parseDouble(inductionTime); } catch (Exception e) { this.logger.warn("Could not parse double from provided induction time {}.", inductionTime, e); } } @Override public void setPredictedInferenceTime(final String inferenceTime) { try { this.predictedInferenceTime = Double.parseDouble(inferenceTime); } catch (Exception e) { this.logger.warn("Could not parse double from provided inference time {}.", inferenceTime, e); } } @Override public Double getPredictedInductionTime() { return this.predictedInductionTime; } @Override public Double getPredictedInferenceTime() { return this.predictedInferenceTime; } @Override public void setScore(final Double score) { if (score == null) { return; } this.score = score; } @Override public Double getScore() { return this.score; } @Override public ISupervisedLearner<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> getLearner() { return this.wrappedSLClassifier; } @Override public String toString() { return this.getClass().getName() + " -> " + this.wrappedSLClassifier.toString(); } @Override public String getLoggerName() { return this.logger.getName(); } @Override public void setLoggerName(final String name) { this.logger = LoggerFactory.getLogger(name); if (this.wrappedSLClassifier instanceof ILoggingCustomizable) { ((ILoggingCustomizable) this.wrappedSLClassifier).setLoggerName(name + ".bl"); } else { this.logger.info("Underlying learner {} is not {}, so not customizing its logger.", this.wrappedSLClassifier.getClass(), ILoggingCustomizable.class); } } }
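A small sketch of the intended bookkeeping: each fit/predict call on the wrapper is timed via the internal TimeTracker, and the measured durations can be read back afterwards. The component instance, learner, and datasets are placeholders.

// wrap an arbitrary learner for time tracking (all variable names are placeholders)
TimeTrackingLearnerWrapper tracked = new TimeTrackingLearnerWrapper(componentInstance, learner);
tracked.fit(trainData);        // fit duration recorded in getFitTimes()
tracked.predict(testData);     // batch and per-instance prediction times recorded
long lastFitMillis = tracked.getFitTimes().get(tracked.getFitTimes().size() - 1);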
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/core
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/core/events/ClassifierFoundEvent.java
package ai.libs.mlplan.core.events; import java.util.HashMap; import java.util.Map; import org.api4.java.ai.ml.core.learner.ISupervisedLearner; import org.api4.java.algorithm.IAlgorithm; import org.api4.java.algorithm.events.result.IScoredSolutionCandidateFoundEvent; import ai.libs.jaicore.basic.algorithm.ASolutionCandidateFoundEvent; import ai.libs.jaicore.components.api.IComponentInstance; import ai.libs.jaicore.logging.ToJSONStringUtil; public class ClassifierFoundEvent extends ASolutionCandidateFoundEvent<ISupervisedLearner<?, ?>> implements IScoredSolutionCandidateFoundEvent<ISupervisedLearner<?, ?>, Double> { private final double inSampleError; private final IComponentInstance componentDescription; private final int timeToEvaluate; public ClassifierFoundEvent(final IAlgorithm<?, ?> algorithm, final IComponentInstance componentDescription, final ISupervisedLearner<?, ?> solutionCandidate, final double inSampleError, final int timeToEvaluate) { super(algorithm, solutionCandidate); this.inSampleError = inSampleError; this.componentDescription = componentDescription; this.timeToEvaluate = timeToEvaluate; } public double getInSampleError() { return this.inSampleError; } @Override public Double getScore() { return this.inSampleError; } public IComponentInstance getComponentDescription() { return this.componentDescription; } public double getTimeToEvaluate() { return this.timeToEvaluate; } @Override public String toString() { Map<String, Object> fields = new HashMap<>(); fields.put("candidate", super.getSolutionCandidate()); fields.put("componentDescription", this.componentDescription); fields.put("inSampleError", this.inSampleError); fields.put("timeToEvaluate", this.timeToEvaluate); return ToJSONStringUtil.toJSONString(fields); } }
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/core
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/core/events/SupervisedLearnerCreatedEvent.java
package ai.libs.mlplan.core.events; import org.api4.java.ai.ml.core.learner.ISupervisedLearner; import org.api4.java.common.event.IEvent; import ai.libs.jaicore.components.api.IComponentInstance; public class SupervisedLearnerCreatedEvent implements IEvent { private final IComponentInstance instance; private final ISupervisedLearner<?, ?> classifier; private final long timestamp = System.currentTimeMillis(); public SupervisedLearnerCreatedEvent(final IComponentInstance instance, final ISupervisedLearner<?, ?> classifier) { super(); this.instance = instance; this.classifier = classifier; } public IComponentInstance getInstance() { return this.instance; } public ISupervisedLearner<?, ?> getClassifier() { return this.classifier; } @Override public long getTimestamp() { return this.timestamp; } }
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/core
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/core/events/TimeTrackingLearnerEvaluationEvent.java
package ai.libs.mlplan.core.events; import org.api4.java.ai.ml.core.learner.ISupervisedLearner; import org.api4.java.common.event.IEvent; import ai.libs.jaicore.components.api.IComponentInstance; import ai.libs.mlplan.core.ITimeTrackingLearner; public class TimeTrackingLearnerEvaluationEvent implements IEvent { private final long timestamp; private final IComponentInstance ci; private final ISupervisedLearner<?, ?> learner; private final Double actualFitTime; private final Double actualPredictTime; private final Double predictedFitTime; private final Double predictedPredictTime; private final Double score; @Override public String toString() { return "CI " + this.ci + " " + this.actualFitTime + " " + this.actualPredictTime + " " + this.predictedFitTime + " " + this.predictedPredictTime + " " + this.score; } public TimeTrackingLearnerEvaluationEvent(final ITimeTrackingLearner timeTrackingLearner) { this.timestamp = System.currentTimeMillis(); this.ci = timeTrackingLearner.getComponentInstance(); this.learner = timeTrackingLearner; this.actualFitTime = (!timeTrackingLearner.getFitTimes().isEmpty()) ? timeTrackingLearner.getFitTimes().stream().mapToDouble(x -> x).average().getAsDouble() / 1000 : null; this.actualPredictTime = (!timeTrackingLearner.getBatchPredictionTimesInMS().isEmpty()) ? timeTrackingLearner.getBatchPredictionTimesInMS().stream().mapToDouble(x -> x).average().getAsDouble() / 1000 : null; this.predictedFitTime = timeTrackingLearner.getPredictedInductionTime(); this.predictedPredictTime = timeTrackingLearner.getPredictedInferenceTime(); this.score = timeTrackingLearner.getScore(); } @Override public long getTimestamp() { return this.timestamp; } public IComponentInstance getComponentInstance() { return this.ci; } public ISupervisedLearner<?, ?> getLearner() { return this.learner; } public Double getActualFitTime() { return this.actualFitTime; } public Double getActualPredictTime() { return this.actualPredictTime; } public Double getPredictedFitTime() { return this.predictedFitTime; } public Double getPredictedPredictTime() { return this.predictedPredictTime; } public Double getScore() { return this.score; } }
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/exception/UnsupportedProblemTypeException.java
package ai.libs.mlplan.exception; import ai.libs.jaicore.ml.core.exception.UncheckedJaicoreMLException; import ai.libs.mlplan.core.IProblemType; /** * The {@link UnsupportedProblemTypeException} indicates that an {@link IProblemType} has been chosen that is not supported by the ML-Plan version in use. Details concerning the error can be inferred from the associated message. * * @author Tanja Tornede * */ public class UnsupportedProblemTypeException extends UncheckedJaicoreMLException { private static final long serialVersionUID = 1251668494400378438L; /** * Creates a new {@link UnsupportedProblemTypeException} with the given parameters. * * @param message The message of this {@link Exception}. * @param cause The underlying cause of this {@link Exception}. */ public UnsupportedProblemTypeException(final String message, final Throwable cause) { super(message, cause); } /** * Creates a new {@link UnsupportedProblemTypeException} with the given parameters. * * @param message The message of this {@link Exception}. */ public UnsupportedProblemTypeException(final String message) { super(message); } }
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/multiclass/IMLPlanClassifierConfig.java
package ai.libs.mlplan.multiclass; import java.util.List; import org.aeonbits.owner.Config.Sources; import ai.libs.hasco.twophase.TwoPhaseHASCOConfig; @Sources({ "file:conf/mlplan.properties" }) public interface IMLPlanClassifierConfig extends TwoPhaseHASCOConfig { public static final String PREFERRED_COMPONENTS = "mlplan.preferredComponents"; public static final String SELECTION_PORTION = "mlplan.selectionportion"; public static final String PRECAUTION_OFFSET = "mlplan.precautionoffset"; @Key(SELECTION_PORTION) @DefaultValue("0.3") public double dataPortionForSelection(); @Key(PRECAUTION_OFFSET) @DefaultValue("5") public int precautionOffset(); @Key(PREFERRED_COMPONENTS) public List<String> preferredComponents(); }
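The interface is backed by the OWNER library, so an instance can be obtained via ConfigFactory. The property values shown in the comment are illustrative, not shipped configuration.

import org.aeonbits.owner.ConfigFactory;
import ai.libs.mlplan.multiclass.IMLPlanClassifierConfig;

IMLPlanClassifierConfig config = ConfigFactory.create(IMLPlanClassifierConfig.class);
double selectionPortion = config.dataPortionForSelection(); // key mlplan.selectionportion, default 0.3
// conf/mlplan.properties might contain, for example:
//   mlplan.selectionportion = 0.3
//   mlplan.precautionoffset = 5
//   mlplan.preferredComponents = <comma-separated component names>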
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/safeguard/AlwaysEvaluateSafeGuard.java
package ai.libs.mlplan.safeguard; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset; import org.api4.java.algorithm.Timeout; import ai.libs.jaicore.components.api.IComponentInstance; import ai.libs.jaicore.components.model.ComponentInstance; import ai.libs.mlplan.core.ITimeTrackingLearner; /** * The AlwaysEvaluateSafeGuard is a dummy safe guard that always predicts that any algorithm will be evaluated within the timeout and will return a result instantly. It can thus be employed to disable the safe guard feature of the {@link ai.libs.mlplan.core.PipelineEvaluator}. * * @author mwever */ public class AlwaysEvaluateSafeGuard implements IEvaluationSafeGuard { /** * Standard constructor for initializing an AlwaysEvaluateSafeGuard. */ public AlwaysEvaluateSafeGuard() { // nothing to do here as the dummy safe guard is stateless. } @Override public boolean predictWillAdhereToTimeout(final IComponentInstance ci, final Timeout timeout) throws Exception { if (!(ci instanceof ComponentInstance)) { throw new IllegalArgumentException("Only works with ComponentInstance objects"); } ((ComponentInstance)ci).putAnnotation(IEvaluationSafeGuard.ANNOTATION_PREDICTED_INDUCTION_TIME, "0.0"); ((ComponentInstance)ci).putAnnotation(IEvaluationSafeGuard.ANNOTATION_PREDICTED_INFERENCE_TIME, "0.0"); // always predict that it will adhere to the timeout, no matter what timeout is given. return true; } @Override public double predictInductionTime(final IComponentInstance ci, final ILabeledDataset<?> dTrain) throws Exception { // predict that the model will be induced instantly. return 0; } @Override public double predictInferenceTime(final IComponentInstance ci, final ILabeledDataset<?> dTrain, final ILabeledDataset<?> dTest) throws Exception { // predict that inference will happen instantly. return 0; } @Override public void updateWithActualInformation(final IComponentInstance ci, final ITimeTrackingLearner learner) { // nothing to remember here } @Override public void registerListener(final Object listener) { // nothing to register at this point. } }
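Its practical role is to switch the safe guard off: as shown in PipelineEvaluator.setSafeGuard above, passing null installs this dummy, and passing it explicitly is equivalent. The pipelineEvaluator variable is a placeholder.

pipelineEvaluator.setSafeGuard(new AlwaysEvaluateSafeGuard()); // every candidate gets evaluated
pipelineEvaluator.setSafeGuard(null);                          // same effect via the null fallback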
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/safeguard/AlwaysEvaluateSafeGuardFactory.java
package ai.libs.mlplan.safeguard; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance; import org.api4.java.ai.ml.core.evaluation.ISupervisedLearnerEvaluator; public class AlwaysEvaluateSafeGuardFactory implements IEvaluationSafeGuardFactory { public AlwaysEvaluateSafeGuardFactory() { // nothing to do here } @Override public IEvaluationSafeGuardFactory withEvaluator(final ISupervisedLearnerEvaluator<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> searchEvaluator) { // nothing to do here return this; } @Override public IEvaluationSafeGuard build() throws Exception { return new AlwaysEvaluateSafeGuard(); } }
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/safeguard/AlwaysPreventSafeGuard.java
package ai.libs.mlplan.safeguard; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset; import org.api4.java.algorithm.Timeout; import ai.libs.jaicore.components.api.IComponentInstance; import ai.libs.jaicore.components.model.ComponentInstance; import ai.libs.mlplan.core.ITimeTrackingLearner; public class AlwaysPreventSafeGuard implements IEvaluationSafeGuard { @Override public boolean predictWillAdhereToTimeout(final IComponentInstance ci, final Timeout timeout) throws Exception { if (!(ci instanceof ComponentInstance)) { throw new IllegalArgumentException("Only works with ComponentInstance objects"); } ((ComponentInstance)ci).putAnnotation(IEvaluationSafeGuard.ANNOTATION_PREDICTED_INDUCTION_TIME, Integer.MAX_VALUE + ""); ((ComponentInstance)ci).putAnnotation(IEvaluationSafeGuard.ANNOTATION_PREDICTED_INFERENCE_TIME, Integer.MAX_VALUE + ""); return false; } @Override public double predictInductionTime(final IComponentInstance ci, final ILabeledDataset<?> dTrain) throws Exception { return Integer.MAX_VALUE; } @Override public double predictInferenceTime(final IComponentInstance ci, final ILabeledDataset<?> dTrain, final ILabeledDataset<?> dTest) throws Exception { return Integer.MAX_VALUE; } @Override public void updateWithActualInformation(final IComponentInstance ci, final ITimeTrackingLearner wrappedLearner) { // intentionally do nothing } @Override public void registerListener(final Object listener) { // intentionally do nothing } }
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/safeguard/AlwaysPreventSafeGuardFactory.java
package ai.libs.mlplan.safeguard; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance; import org.api4.java.ai.ml.core.evaluation.ISupervisedLearnerEvaluator; public class AlwaysPreventSafeGuardFactory implements IEvaluationSafeGuardFactory { public AlwaysPreventSafeGuardFactory() { // nothing to do here } @Override public IEvaluationSafeGuardFactory withEvaluator(final ISupervisedLearnerEvaluator<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> searchEvaluator) { return this; } @Override public IEvaluationSafeGuard build() throws Exception { return new AlwaysPreventSafeGuard(); } }
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/safeguard/EvaluationSafeGuardException.java
package ai.libs.mlplan.safeguard; import org.api4.java.common.attributedobjects.ObjectEvaluationFailedException; import ai.libs.jaicore.components.api.IComponentInstance; public class EvaluationSafeGuardException extends ObjectEvaluationFailedException { /** * Auto-generated serial version UID. */ private static final long serialVersionUID = 2170317514693997168L; private final IComponentInstance ci; public EvaluationSafeGuardException(final String message, final IComponentInstance ci) { super(message); this.ci = ci; } public IComponentInstance getCausingComponentInstance() { return this.ci; } }
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/safeguard/EvaluationSafeGuardFiredEvent.java
package ai.libs.mlplan.safeguard; import org.api4.java.common.event.IEvent; import ai.libs.jaicore.components.api.IComponentInstance; public class EvaluationSafeGuardFiredEvent implements IEvent { private final IComponentInstance ci; private final long timestamp; public EvaluationSafeGuardFiredEvent(final IComponentInstance ci) { this.timestamp = System.currentTimeMillis(); this.ci = ci; } public IComponentInstance getComponentInstance() { return this.ci; } @Override public long getTimestamp() { return this.timestamp; } }
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/safeguard/IEvaluationSafeGuard.java
package ai.libs.mlplan.safeguard; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset; import org.api4.java.algorithm.Timeout; import ai.libs.jaicore.components.api.IComponentInstance; import ai.libs.mlplan.core.ITimeTrackingLearner; /** * An evaluation safe guard can be used to predict whether an evaluation is likely to succeed or not. * Based on this information the evaluation may be adapted or omitted entirely. * * Three types of runtimes are distinguished: * - induction time: The time needed to induce a model as described via the component instance. * - inference time: The time needed to do inference with a model as described via the component instance. * - evaluation time: The time needed for both induction and inference, i.e. the sum of the previous two runtimes. * * @author mwever */ public interface IEvaluationSafeGuard { public static final String ANNOTATION_PREDICTED_INDUCTION_TIME = "predictedInductionTime"; public static final String ANNOTATION_PREDICTED_INFERENCE_TIME = "predictedInferenceTime"; public static final String ANNOTATION_SOURCE = "predictionSource"; /** * Predicts whether a component instance <code>ci</code> is likely to adhere to the given <code>timeout</code>. * @param ci The component instance to make the prediction for. * @param timeout The timeout posed to the evaluation of the component instance. * @return Returns true iff the component instance can likely be evaluated within the given timeout. * @throws Exception */ public boolean predictWillAdhereToTimeout(final IComponentInstance ci, Timeout timeout) throws Exception; /** * Predicts the runtime that is required for inducing a model. * * @param ci The component instance describing the model to predict the induction time for. * @param dTrain The training data from which a model is to be induced. * @return The time needed for inducing a model. * @throws Exception */ public double predictInductionTime(final IComponentInstance ci, final ILabeledDataset<?> dTrain) throws Exception; /** * Predicts the runtime that is required for doing inference with the given model. * * @param ci The component instance describing the model to predict the inference time for. * @param dTrain The training data used to induce the model. * @param dTest The data for which inference is to be done. * @return The time needed for making predictions on the validation set. * @throws Exception */ public double predictInferenceTime(final IComponentInstance ci, final ILabeledDataset<?> dTrain, final ILabeledDataset<?> dTest) throws Exception; /** * @param ci The component instance describing the model to predict the evaluation time for. * @param dTrain The data to induce a model from. * @param dTest The data to do inference for. * @return The time needed for inducing a model and making predictions. */ default double predictEvaluationTime(final IComponentInstance ci, final ILabeledDataset<?> dTrain, final ILabeledDataset<?> dTest) throws Exception { return this.predictInductionTime(ci, dTrain) + this.predictInferenceTime(ci, dTrain, dTest); } /** * Updates the safe guard with current information obtained by measuring the induction and inference time of the given component instance on-line. * * @param ci The component instance describing the model to update the actual information for. * @param wrappedLearner The learner that has been used to evaluate the component instance. It must be a time tracking learner.
*/ public void updateWithActualInformation(final IComponentInstance ci, final ITimeTrackingLearner wrappedLearner); public void registerListener(Object listener); }
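A minimal sketch of a custom safe guard with fixed runtime estimates; a real implementation would derive its estimates from the component instance and the data, refine them in updateWithActualInformation, and maintain the prediction annotations as AlwaysEvaluateSafeGuard does (omitted here for brevity). All estimate values are assumptions.

import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;
import org.api4.java.algorithm.Timeout;
import ai.libs.jaicore.components.api.IComponentInstance;
import ai.libs.mlplan.core.ITimeTrackingLearner;

public class ConstantEstimateSafeGuard implements IEvaluationSafeGuard {

    private final double estimatedInductionTimeSeconds; // illustrative constant estimates
    private final double estimatedInferenceTimeSeconds;

    public ConstantEstimateSafeGuard(final double inductionSeconds, final double inferenceSeconds) {
        this.estimatedInductionTimeSeconds = inductionSeconds;
        this.estimatedInferenceTimeSeconds = inferenceSeconds;
    }

    @Override
    public boolean predictWillAdhereToTimeout(final IComponentInstance ci, final Timeout timeout) {
        // compare the constant evaluation-time estimate against the granted budget
        double predicted = this.estimatedInductionTimeSeconds + this.estimatedInferenceTimeSeconds;
        return predicted <= timeout.seconds();
    }

    @Override
    public double predictInductionTime(final IComponentInstance ci, final ILabeledDataset<?> dTrain) {
        return this.estimatedInductionTimeSeconds;
    }

    @Override
    public double predictInferenceTime(final IComponentInstance ci, final ILabeledDataset<?> dTrain, final ILabeledDataset<?> dTest) {
        return this.estimatedInferenceTimeSeconds;
    }

    @Override
    public void updateWithActualInformation(final IComponentInstance ci, final ITimeTrackingLearner wrappedLearner) {
        // a learning safe guard would refine its estimates from wrappedLearner.getFitTimes() etc.
    }

    @Override
    public void registerListener(final Object listener) {
        // no events emitted by this sketch
    }
}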
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/safeguard/IEvaluationSafeGuardFactory.java
package ai.libs.mlplan.safeguard; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance; import org.api4.java.ai.ml.core.evaluation.ISupervisedLearnerEvaluator; public interface IEvaluationSafeGuardFactory { public IEvaluationSafeGuardFactory withEvaluator(ISupervisedLearnerEvaluator<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> searchEvaluator); public IEvaluationSafeGuard build() throws Exception; }
0
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-core/0.2.7/ai/libs/mlplan/safeguard/package-info.java
/** * This package contains tooling for making decisions on whether or not to evaluate a classifier. * Consider the scenario that the evaluation is only granted a limited time for running and it is * very unlikely that the evaluation would finish within this timeout. Instead of wasting the time * for the evaluation and eventually aborting the evaluation routine, a safe guard would notify * the user directly that the evaluation routine is anticipated to not finish within the given * time frame. * * Based on this information the user then may decide to change certain parameters of the evaluation: * - Grant more time for evaluation (extend time resources) * - Change parameters of the dataset to evaluate on (decrease data complexity / effort for induction/prediction) * - Change parameters of the model to be evaluated (decrease model complexity for faster induction/prediction) */ package ai.libs.mlplan.safeguard;
0
java-sources/ai/libs/mlplan-ext-bigdata/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-ext-bigdata/0.2.7/ai/libs/mlplan/bigdata/MLPlan4BigFileInput.java
package ai.libs.mlplan.bigdata; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Random; import java.util.concurrent.TimeUnit; import org.api4.java.ai.ml.core.learner.ISupervisedLearner; import org.api4.java.algorithm.Timeout; import org.api4.java.algorithm.events.IAlgorithmEvent; import org.api4.java.algorithm.exceptions.AlgorithmException; import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException; import org.api4.java.algorithm.exceptions.AlgorithmTimeoutedException; import org.api4.java.common.control.ILoggingCustomizable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.eventbus.Subscribe; import ai.libs.jaicore.basic.StatisticsUtil; import ai.libs.jaicore.basic.algorithm.AAlgorithm; import ai.libs.jaicore.components.api.IComponentInstance; import ai.libs.jaicore.ml.core.evaluation.evaluator.events.MCCVSplitEvaluationEvent; import ai.libs.jaicore.ml.core.filter.sampling.infiles.ReservoirSampling; import ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories.SimpleRandomSamplingFactory; import ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation.LearningCurveExtrapolatedEvent; import ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation.ipl.InversePowerLawExtrapolationMethod; import ai.libs.jaicore.ml.weka.classification.learner.IWekaClassifier; import ai.libs.jaicore.ml.weka.dataset.WekaInstances; import ai.libs.mlplan.core.MLPlan; import ai.libs.mlplan.core.events.SupervisedLearnerCreatedEvent; import ai.libs.mlplan.weka.MLPlanWekaBuilder; import weka.classifiers.Classifier; import weka.classifiers.functions.LinearRegression; import weka.core.Attribute; import weka.core.DenseInstance; import weka.core.Instance; import weka.core.Instances; /** * This is a version of ML-Plan that tries to cope with medium-sized data in the sense of big files. * That is, the data is still small enough to be organized in a single file such that no streaming is required. * The data is, however, in general too large to be entirely loaded into memory. * * We use simple sampling to create a relatively small subset of the data, then run info gain, and then ML-Plan with * learning curve prediction.
* * @author fmohr * */ public class MLPlan4BigFileInput extends AAlgorithm<File, Classifier> implements ILoggingCustomizable { private Logger logger = LoggerFactory.getLogger(MLPlan4BigFileInput.class); private File intermediateSizeDownsampledFile = new File("testrsc/sampled/intermediate/" + this.getInput().getName()); private final int[] anchorpointsTraining = new int[] { 8, 16, 64, 128 }; private Map<ISupervisedLearner<?, ?>, IComponentInstance> classifier2modelMap = new HashMap<>(); private Map<IComponentInstance, int[]> trainingTimesDuringSearch = new HashMap<>(); private Map<IComponentInstance, List<Integer>> trainingTimesDuringSelection = new HashMap<>(); private int numTrainingInstancesUsedInSelection; private MLPlan<IWekaClassifier> mlplan; public MLPlan4BigFileInput(final File input) { super(input); } private void downsampleData(final File from, final File to, final int size) throws InterruptedException, AlgorithmExecutionCanceledException, AlgorithmException, AlgorithmTimeoutedException { ReservoirSampling sampler = new ReservoirSampling(new Random(0), this.getInput()); try { File outputFolder = to.getParentFile(); if (!outputFolder.exists()) { this.logger.info("Creating data output folder {}", outputFolder.getAbsolutePath()); outputFolder.mkdirs(); } this.logger.info("Starting sampler {} for data source {}", sampler.getClass().getName(), from.getAbsolutePath()); sampler.setOutputFileName(to.getAbsolutePath()); sampler.setSampleSize(size); sampler.call(); this.logger.info("Reduced dataset size to {}", size); } catch (IOException e) { throw new AlgorithmException("Could not create a sub-sample of the given data.", e); } } @Override public IAlgorithmEvent nextWithException() throws InterruptedException, AlgorithmExecutionCanceledException, AlgorithmTimeoutedException, AlgorithmException { switch (this.getState()) { case CREATED: /* first create an intermediate sized downsampled file (10k instances), which is the basis for further operations */ this.downsampleData(this.getInput(), this.intermediateSizeDownsampledFile, 10000); /* down-sample the intermediate sized input data again for ML-Plan */ File downsampledFile = new File("testrsc/sampled/" + this.getInput().getName()); this.downsampleData(this.intermediateSizeDownsampledFile, downsampledFile, 1000); if (!downsampledFile.exists()) { throw new AlgorithmException("The file " + downsampledFile.getAbsolutePath() + " that should be used for ML-Plan does not exist!"); } Instances data; try { data = new Instances(new FileReader(downsampledFile)); data.setClassIndex(data.numAttributes() - 1); this.logger.info("Loaded {}x{} dataset", data.size(), data.numAttributes()); } catch (IOException e) { throw new AlgorithmException("Could not create a sub-sample of the given data.", e); } /* apply ML-Plan to reduced data */ MLPlanWekaBuilder builder; try { builder = new MLPlanWekaBuilder(); builder.withLearningCurveExtrapolationEvaluation(this.anchorpointsTraining, new SimpleRandomSamplingFactory<>(), .7, new InversePowerLawExtrapolationMethod()); builder.withNodeEvaluationTimeOut(new Timeout(15, TimeUnit.MINUTES)); builder.withCandidateEvaluationTimeOut(new Timeout(5, TimeUnit.MINUTES)); this.mlplan = builder.withDataset(new WekaInstances(data)).build(); this.mlplan.setLoggerName(this.getLoggerName() + ".mlplan"); this.mlplan.registerListener(this); this.mlplan.setTimeout(new Timeout(this.getTimeout().seconds() - 30, TimeUnit.SECONDS)); this.mlplan.setNumCPUs(3); this.mlplan.setBuildSelectedClasifierOnGivenData(false); // we will build the 
classifier, ML-Plan should not waste time with this this.logger.info("ML-Plan initialized, activation finished!"); return this.activate(); } catch (IOException e) { throw new AlgorithmException("Could not initialize ML-Plan!", e); } case ACTIVE: /* run ML-Plan */ this.logger.info("Starting ML-Plan."); this.mlplan.call(); this.logger.info("ML-Plan has finished. Selected classifier is {} with observed internal performance {}. Will now try to determine the portion of training data that may be used for final training.", this.mlplan.getSelectedClassifier(), this.mlplan.getInternalValidationErrorOfSelectedClassifier()); /* fit regression model to estimate the runtime behavior of the selected classifier */ int[] timesDuringSearch = this.trainingTimesDuringSearch.get(this.mlplan.getComponentInstanceOfSelectedClassifier()); List<Integer> timesDuringSelection = this.trainingTimesDuringSelection.get(this.mlplan.getComponentInstanceOfSelectedClassifier()); this.logger.info("Observed training times of selected classifier: {} (search) and {} (selection on {} training instances)", Arrays.toString(timesDuringSearch), timesDuringSelection, this.numTrainingInstancesUsedInSelection); Instances observedRuntimeData = this.getTrainingTimeInstancesForClassifier(this.mlplan.getComponentInstanceOfSelectedClassifier()); this.logger.info("Inferred the following data:\n{}", observedRuntimeData); LinearRegression lr = new LinearRegression(); try { lr.buildClassifier(observedRuntimeData); this.logger.info("Obtained the following output for the regression model: {}", lr); } catch (Exception e1) { throw new AlgorithmException("Could not build a regression model for the runtime.", e1); } /* determine the number of instances that can be used for training with this classifier in the remaining time */ int numInstances = 500; int remainingTime = (int) this.getRemainingTimeToDeadline().milliseconds(); this.logger.info("Determining number of instances that can be used for training given that {}s are remaining.", (int) Math.round(remainingTime / 1000.0)); while (numInstances < 10000) { Instance low = this.getInstanceForRuntimeAnalysis(numInstances); try { double predictedRuntime = lr.classifyInstance(low); if (predictedRuntime > remainingTime) { this.logger.info("Obtained predicted runtime of {}ms for {} training instances, which is more time than we still have. Falling back to the previous number.", predictedRuntime, numInstances); numInstances -= 50; break; } else { this.logger.info("Obtained predicted runtime of {}ms for {} training instances, which still seems manageable.", predictedRuntime, numInstances); numInstances += 50; } } catch (Exception e) { throw new AlgorithmException("Could not obtain a runtime prediction for " + numInstances + " instances.", e); } } this.logger.info("Estimated that {} instances can be used for training in time.", numInstances); /* train the classifier with the determined number of samples */ try { File finalDataFile = new File("testrsc/sampled/final/" + this.getInput().getName()); this.downsampleData(this.intermediateSizeDownsampledFile, finalDataFile, numInstances); Instances completeData = new Instances(new FileReader(finalDataFile)); completeData.setClassIndex(completeData.numAttributes() - 1); this.logger.info("Created final dataset with {} instances. 
Now building the final classifier.", completeData.size()); long startFinalTraining = System.currentTimeMillis(); this.mlplan.getSelectedClassifier().fit(new WekaInstances(completeData)); this.logger.info("Classifier has been fully trained within {}ms.", System.currentTimeMillis() - startFinalTraining); } catch (Exception e) { throw new AlgorithmException("Could not train the final classifier with the full data.", e); } return this.terminate(); default: throw new IllegalStateException(); } } private Instances getTrainingTimeInstancesForClassifier(final IComponentInstance ci) { ArrayList<Attribute> attributes = new ArrayList<>(); attributes.add(new Attribute("numInstances")); // attributes.add(new Attribute("numInstancesSquared")); attributes.add(new Attribute("runtime")); Instances data = new Instances("Runtime Analysis Regression Data for " + ci, attributes, 0); /* create one instance for each data point during search phase */ for (int i = 0; i < this.anchorpointsTraining.length; i++) { Instance inst = this.getInstanceForRuntimeAnalysis(this.anchorpointsTraining[i]); inst.setValue(1, this.trainingTimesDuringSearch.get(ci)[i]); data.add(inst); } /* create one instance for the mean of the values observed in selection phase */ if (this.trainingTimesDuringSelection.containsKey(ci)) { Instance inst = this.getInstanceForRuntimeAnalysis(this.numTrainingInstancesUsedInSelection); inst.setValue(1, StatisticsUtil.mean(this.trainingTimesDuringSelection.get(ci))); data.add(inst); } else { this.logger.warn("Classifier {} has not been evaluated in selection phase. Cannot use this information to fit its regression model.", ci); } /* set target attribute and return data */ data.setClassIndex(1); return data; } private Instance getInstanceForRuntimeAnalysis(final int numberOfInstances) { Instance inst = new DenseInstance(2); inst.setValue(0, numberOfInstances); // inst.setValue(1, Math.pow(numberOfInstances, 2)); return inst; } @Subscribe public void receiveClassifierCreatedEvent(final SupervisedLearnerCreatedEvent e) { this.logger.info("Binding component instance {} to classifier {}", e.getInstance(), e.getClassifier()); this.classifier2modelMap.put(e.getClassifier(), e.getInstance()); } @Subscribe public void receiveExtrapolationFinishedEvent(final LearningCurveExtrapolatedEvent e) { IComponentInstance ci = this.classifier2modelMap.get(e.getExtrapolator().getLearner()); this.logger.info("Storing training times {} for classifier {}", Arrays.toString(e.getExtrapolator().getTrainingTimes()), ci); this.trainingTimesDuringSearch.put(ci, e.getExtrapolator().getTrainingTimes()); } @Subscribe public void receiveMCCVFinishedEvent(final MCCVSplitEvaluationEvent e) { IComponentInstance ci = this.classifier2modelMap.get(e.getClassifier()); this.logger.info("Storing training time {} for classifier {} in selection phase with {} training instances and {} validation instances", e.getSplitEvaluationTime(), ci, e.getNumInstancesUsedForTraining(), e.getNumInstancesUsedForValidation()); if (this.numTrainingInstancesUsedInSelection == 0) { this.numTrainingInstancesUsedInSelection = e.getNumInstancesUsedForTraining(); } else if (this.numTrainingInstancesUsedInSelection != e.getNumInstancesUsedForTraining()) { this.logger.warn("Memorized {} as number of instances used for training in selection phase, but now observed one classifier using {} instances.", this.numTrainingInstancesUsedInSelection, e.getNumInstancesUsedForTraining()); } if (!this.trainingTimesDuringSelection.containsKey(ci)) { 
this.trainingTimesDuringSelection.put(ci, new ArrayList<>()); } this.trainingTimesDuringSelection.get(ci).add(e.getSplitEvaluationTime()); } @Override public Classifier call() throws InterruptedException, AlgorithmExecutionCanceledException, AlgorithmTimeoutedException, AlgorithmException { while (this.hasNext()) { this.next(); } return this.mlplan.getSelectedClassifier().getClassifier(); } @Override public void setLoggerName(final String loggerName) { this.logger = LoggerFactory.getLogger(loggerName); } @Override public String getLoggerName() { return this.logger.getName(); } }
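/*
 * Editor-added usage sketch (not part of the original source): how MLPlan4BigFileInput
 * is meant to be driven end to end. The ARFF path is a hypothetical placeholder, and the
 * sketch assumes it lives in the same package as MLPlan4BigFileInput; the input file is
 * expected to have the class attribute in the last column, and a timeout should be set
 * before call() since the algorithm budgets its phases against the remaining time.
 */
class MLPlan4BigFileInputUsageSketch {
	public static void main(final String[] args) throws Exception {
		MLPlan4BigFileInput mlplan = new MLPlan4BigFileInput(new java.io.File("data/bigfile.arff")); // hypothetical path
		mlplan.setTimeout(new org.api4.java.algorithm.Timeout(1, java.util.concurrent.TimeUnit.HOURS));
		weka.classifiers.Classifier chosen = mlplan.call(); // samples, runs ML-Plan, fits the runtime model, trains the final model
		System.out.println("Selected classifier: " + chosen);
	}
}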
0
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan/metamining/IntermediateSolutionEvent.java
package ai.libs.mlplan.metamining; import org.api4.java.ai.ml.classification.IClassifier; import org.api4.java.algorithm.IAlgorithm; import ai.libs.jaicore.basic.algorithm.AAlgorithmEvent; import ai.libs.jaicore.ml.weka.classification.pipeline.MLPipeline; public class IntermediateSolutionEvent extends AAlgorithmEvent { private String classifier; private String searcher; private String evaluator; private double score; public IntermediateSolutionEvent(final IAlgorithm<?, ?> algorithm, final IClassifier classifier, final double score) { super(algorithm); if (classifier instanceof MLPipeline) { MLPipeline pl = (MLPipeline) classifier; this.classifier = pl.getBaseClassifier().getClass().getName(); if (pl.getPreprocessors() != null && !pl.getPreprocessors().isEmpty()) { this.searcher = pl.getPreprocessors().get(0).getSearcher().getClass().getName(); this.evaluator = pl.getPreprocessors().get(0).getEvaluator().getClass().getName(); } } else { this.classifier = classifier.getClass().getName(); } this.score = score; } public String getClassifier() { return this.classifier; } public String getSearcher() { return this.searcher; } public String getEvaluator() { return this.evaluator; } public double getScore() { return this.score; } }
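/*
 * Editor-added sketch: how a Guava event-bus listener might consume IntermediateSolutionEvent.
 * The listener class name is hypothetical; it would be registered via
 * MetaMLPlan#registerListenerForIntermediateSolutions(Object).
 */
class IntermediateSolutionLogger {
	@com.google.common.eventbus.Subscribe
	public void onIntermediateSolution(final IntermediateSolutionEvent e) {
		// searcher/evaluator are null when the solution is a plain classifier without preprocessing
		System.out.println(e.getClassifier() + " (searcher=" + e.getSearcher() + ", evaluator=" + e.getEvaluator() + ") score=" + e.getScore());
	}
}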
0
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan/metamining/MetaMLPlan.java
package ai.libs.mlplan.metamining; import java.io.File; import java.io.IOException; import java.sql.SQLException; import java.util.Collection; import java.util.List; import java.util.NoSuchElementException; import java.util.Timer; import java.util.TimerTask; import org.apache.commons.lang3.time.StopWatch; import org.api4.java.ai.graphsearch.problem.IPathSearchInput; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset; import org.api4.java.algorithm.exceptions.AlgorithmException; import org.openml.webapplication.fantail.dc.GlobalCharacterizer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.eventbus.EventBus; import ai.libs.hasco.core.HASCOUtil; import ai.libs.hasco.metamining.MetaMinerBasedSorter; import ai.libs.jaicore.components.api.IComponent; import ai.libs.jaicore.components.model.ComponentInstance; import ai.libs.jaicore.logging.LoggerUtil; import ai.libs.jaicore.ml.classification.loss.dataset.EAggregatedClassifierMetric; import ai.libs.jaicore.ml.core.evaluation.MLEvaluationUtil; import ai.libs.jaicore.ml.weka.classification.learner.IWekaClassifier; import ai.libs.jaicore.ml.weka.dataset.WekaInstances; import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode; import ai.libs.jaicore.search.algorithms.standard.lds.BestFirstLimitedDiscrepancySearch; import ai.libs.jaicore.search.algorithms.standard.lds.BestFirstLimitedDiscrepancySearchFactory; import ai.libs.jaicore.search.algorithms.standard.lds.NodeOrderList; import ai.libs.jaicore.search.model.other.SearchGraphPath; import ai.libs.jaicore.search.model.travesaltree.ReducedGraphGenerator; import ai.libs.jaicore.search.probleminputs.GraphSearchWithNodeRecommenderInput; import ai.libs.mlplan.metamining.databaseconnection.ExperimentRepository; import ai.libs.mlplan.weka.MLPlan4Weka; import ai.libs.mlplan.weka.MLPlanWekaBuilder; import ai.libs.mlplan.weka.weka.MLPipelineComponentInstanceFactory; import ai.libs.mlplan.weka.weka.WekaPipelineFactory; import weka.classifiers.AbstractClassifier; import weka.core.Instance; import weka.core.Instances; public class MetaMLPlan extends AbstractClassifier { private transient Logger logger = LoggerFactory.getLogger(MetaMLPlan.class); // ids private static final long serialVersionUID = 4772178784402396834L; private static final File resourceFile = new File("resources/automl/searchmodels/weka/weka-all-autoweka.json"); private String algorithmId = "MetaMLPlan"; // Search components private transient BestFirstLimitedDiscrepancySearch<GraphSearchWithNodeRecommenderInput<TFDNode, String>, TFDNode, String, NodeOrderList> lds; private transient WEKAMetaminer metaMiner; private transient WekaPipelineFactory factory = new WekaPipelineFactory(); // Search configuration private long timeoutInSeconds = 60; private long safetyInSeconds = 1; private int cpus = 1; private String metaFeatureSetName = "all"; private String datasetSetName = "all"; private int seed = 0; // Search results private IWekaClassifier bestModel; private transient Collection<IComponent> components; // For intermediate results private transient EventBus eventBus = new EventBus(); public MetaMLPlan(final ILabeledDataset<?> data) throws IOException { this(resourceFile, data); } public MetaMLPlan(final File configFile, final ILabeledDataset<?> data) throws IOException { // Prepare mlPlan to get a graphGenerator MLPlanWekaBuilder builder = new MLPlanWekaBuilder(); builder.withSearchSpaceConfigFile(configFile); builder.withDataset(data); MLPlan4Weka 
mlPlan = builder.build(); mlPlan.next(); // Set search components except lds this.components = builder.getComponents(); this.metaMiner = new WEKAMetaminer(builder.getComponentParameterConfigurations()); // Get lds BestFirstLimitedDiscrepancySearchFactory<GraphSearchWithNodeRecommenderInput<TFDNode, String>, TFDNode, String, NodeOrderList> ldsFactory = new BestFirstLimitedDiscrepancySearchFactory<>(); IPathSearchInput<TFDNode, String> originalInput = mlPlan.getSearchProblemInputGenerator(); GraphSearchWithNodeRecommenderInput<TFDNode, String> problemInput = new GraphSearchWithNodeRecommenderInput<>(new ReducedGraphGenerator<>(originalInput.getGraphGenerator()), originalInput.getGoalTester(), new MetaMinerBasedSorter(this.metaMiner, builder.getComponents())); ldsFactory.setProblemInput(problemInput); this.lds = ldsFactory.getAlgorithm(); } public void buildMetaComponents(final String host, final String user, final String password) throws AlgorithmException, InterruptedException, SQLException, IOException { ExperimentRepository repo = new ExperimentRepository(host, user, password, new MLPipelineComponentInstanceFactory(this.components), this.cpus, this.metaFeatureSetName, this.datasetSetName); this.metaMiner.build(repo.getDistinctPipelines(), repo.getDatasetCahracterizations(), repo.getPipelineResultsOnDatasets()); } public void buildMetaComponents(final String host, final String user, final String password, final int limit) throws AlgorithmException, InterruptedException, SQLException, IOException { this.logger.info("Get past experiment data from data base and build MetaMiner."); ExperimentRepository repo = new ExperimentRepository(host, user, password, new MLPipelineComponentInstanceFactory(this.components), this.cpus, this.metaFeatureSetName, this.datasetSetName); repo.setLimit(limit); this.metaMiner.build(repo.getDistinctPipelines(), repo.getDatasetCahracterizations(), repo.getPipelineResultsOnDatasets()); } @Override public void buildClassifier(final Instances data) throws Exception { StopWatch totalTimer = new StopWatch(); totalTimer.start(); // Characterize data set and give to meta miner this.logger.info("Characterizing data set"); this.metaMiner.setDataSetCharacterization(new GlobalCharacterizer().characterize(data)); // Preparing the split for validating pipelines this.logger.info("Preparing validation split"); // Search for solutions this.logger.info("Searching for solutions"); StopWatch trainingTimer = new StopWatch(); this.bestModel = null; double bestScore = 1; double bestModelMaxTrainingTime = 0; boolean thereIsEnoughTime = true; boolean thereAreMoreElements = true; while (!this.lds.isCanceled() && thereIsEnoughTime && thereAreMoreElements) { try { SearchGraphPath<TFDNode, String> searchGraphPath = this.lds.nextSolutionCandidate(); List<TFDNode> solution = searchGraphPath.getNodes(); if (solution == null) { this.logger.info("Ran out of solutions. 
Search is over."); break; } // Prepare pipeline ComponentInstance ci = HASCOUtil.getSolutionCompositionFromState(this.components, solution.get(solution.size() - 1).getState(), true); IWekaClassifier pl = this.factory.getComponentInstantiation(ci); // Evaluate pipeline trainingTimer.reset(); trainingTimer.start(); this.logger.info("Evaluate Pipeline: {}", pl); double score = MLEvaluationUtil.mccv(pl, new WekaInstances(data), 5, .7, this.seed, EAggregatedClassifierMetric.MEAN_ERRORRATE); this.logger.info("Pipeline Score: {}", score); trainingTimer.stop(); this.eventBus.post(new IntermediateSolutionEvent(null, pl, score)); // Check if better than previous best if (score < bestScore) { this.bestModel = pl; bestScore = score; } if (trainingTimer.getTime() > bestModelMaxTrainingTime) { bestModelMaxTrainingTime = trainingTimer.getTime(); } thereIsEnoughTime = this.checkTermination(totalTimer, bestModelMaxTrainingTime, thereIsEnoughTime); } catch (NoSuchElementException e) { this.logger.info("Finished search (Exhaustive search conducted)."); thereAreMoreElements = false; } catch (Exception e) { this.logger.warn("Continuing search despite error: {}", LoggerUtil.getExceptionInfo(e)); } } Thread finalEval = new Thread() { @Override public void run() { MetaMLPlan.this.logger.info("Evaluating best model on whole training data ({})", MetaMLPlan.this.bestModel); try { MetaMLPlan.this.bestModel.getClassifier().buildClassifier(data); } catch (Exception e) { MetaMLPlan.this.bestModel = null; MetaMLPlan.this.logger.error("Evaluation of best model failed with an exception: {}", LoggerUtil.getExceptionInfo(e)); } } }; TimerTask newT = new TimerTask() { @Override public void run() { MetaMLPlan.this.logger.error("MetaMLPlan: Interrupt building of final classifier because time is running out."); finalEval.interrupt(); } }; // Start timer that interrupts the final training try { new Timer().schedule(newT, this.timeoutInSeconds * 1000 - this.safetyInSeconds * 1000 - totalTimer.getTime()); } catch (IllegalArgumentException e) { this.logger.error("No time anymore to start evaluation of final model. Abort search."); return; } finalEval.start(); finalEval.join(); this.logger.info("Ready. 
Best solution: {}", this.bestModel); } private boolean checkTermination(final StopWatch totalTimer, final double bestModelMaxTrainingTime, boolean thereIsEnoughTime) { // Check if enough time remaining to re-train the current best model on the // whole training data if ((this.timeoutInSeconds - this.safetyInSeconds) * 1000 <= (totalTimer.getTime() + bestModelMaxTrainingTime)) { this.logger.info("Stopping search to train best model on whole training data which is expected to take {} ms", bestModelMaxTrainingTime); thereIsEnoughTime = false; } return thereIsEnoughTime; } @Override public double classifyInstance(final Instance instance) throws Exception { return this.bestModel.getClassifier().classifyInstance(instance); } public void registerListenerForIntermediateSolutions(final Object listener) { this.eventBus.register(listener); } public void setTimeOutInSeconds(final int timeOutInSeconds) { this.timeoutInSeconds = timeOutInSeconds; } public void setMetaFeatureSetName(final String metaFeatureSetName) { this.metaFeatureSetName = metaFeatureSetName; } public void setDatasetSetName(final String datasetSetName) { this.datasetSetName = datasetSetName; } public void setCPUs(final int cPUs) { this.cpus = cPUs; } public WEKAMetaminer getMetaMiner() { return this.metaMiner; } public void setSeed(final int seed) { this.seed = seed; } public String getAlgorithmId() { return this.algorithmId; } public void setAlgorithmId(final String algorithmId) { this.algorithmId = algorithmId; } }
0
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan/metamining/WEKAMetaminer.java
package ai.libs.mlplan.metamining; import java.util.Collections; import java.util.Enumeration; import java.util.List; import java.util.Map; import org.api4.java.algorithm.exceptions.AlgorithmException; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.factory.Nd4j; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ai.libs.hasco.metamining.IMetaMiner; import ai.libs.jaicore.components.api.IComponentInstance; import ai.libs.jaicore.components.api.INumericParameterRefinementConfigurationMap; import ai.libs.jaicore.components.model.ComponentInstance; import ai.libs.mlplan.metamining.pipelinecharacterizing.IPipelineCharacterizer; import ai.libs.mlplan.metamining.pipelinecharacterizing.WEKAPipelineCharacterizer; import ai.libs.mlplan.metamining.similaritymeasures.F3Optimizer; import ai.libs.mlplan.metamining.similaritymeasures.IHeterogenousSimilarityMeasureComputer; import ai.libs.mlplan.metamining.similaritymeasures.IRelativeRankMatrixComputer; import ai.libs.mlplan.metamining.similaritymeasures.RelativeRankMatricComputer; import weka.core.Attribute; import weka.core.Instances; /** * An implementation of the meta miner for pipelines consisting exclusively of WEKA components. * * @author Helena Graf * */ public class WEKAMetaminer implements IMetaMiner { private Logger logger = LoggerFactory.getLogger(WEKAMetaminer.class); private boolean hasBeenBuilt = false; private INDArray datasetMetafeatures; private Enumeration<Attribute> dataSetMetaFeaturesAttributes; private IHeterogenousSimilarityMeasureComputer similarityMeasure = new F3Optimizer(0.1); private IRelativeRankMatrixComputer similarityComputer = new RelativeRankMatricComputer(); private IPipelineCharacterizer pipelineCharacterizer; public WEKAMetaminer(final INumericParameterRefinementConfigurationMap paramConfigs) { this.pipelineCharacterizer = new WEKAPipelineCharacterizer(paramConfigs); } @Override public double score(final ComponentInstance componentInstance) { // Check if has been trained if (!this.hasBeenBuilt) { throw new WEKAMetaminerRuntimeException("Metaminer has not been built!"); } if (this.dataSetMetaFeaturesAttributes == null) { throw new WEKAMetaminerRuntimeException("Metaminer has not been given a data set characterization!"); } // Characterize pipeline and compute similarity with data set double[] pipelineMetafeatures = this.pipelineCharacterizer.characterize(componentInstance); return this.similarityMeasure.computeSimilarity(this.datasetMetafeatures, Nd4j.create(pipelineMetafeatures)); } public void build(final List<? 
extends IComponentInstance> distinctPipelines, final Instances metaFeatureInformation, final double[][][] performanceValues) throws AlgorithmException, InterruptedException { // Check whether the miner has already been built if (this.hasBeenBuilt) { throw new AlgorithmException("MetaMiner has already been built!"); } // ----- Data set Characterization ----- this.dataSetMetaFeaturesAttributes = metaFeatureInformation.enumerateAttributes(); // Convert to matrix (Matrix X with rows representing data sets) INDArray datasetsMetafeatures = Nd4j.create(metaFeatureInformation.size(), metaFeatureInformation.numAttributes()); for (int i = 0; i < metaFeatureInformation.size(); i++) { datasetsMetafeatures.putRow(i, Nd4j.create(metaFeatureInformation.get(i).toDoubleArray())); } this.logger.debug("Dataset metafeatures: {} x {}", datasetsMetafeatures.rows(), datasetsMetafeatures.columns()); // ----- Pipeline Characterization ----- // Compute relative performance ranks of pipelines on data sets this.logger.info("Computing relative performance Matrix."); INDArray rankMatrix = this.similarityComputer.computeRelativeRankMatrix(performanceValues); this.logger.info("Rank matrix: {} x {}", rankMatrix.rows(), rankMatrix.columns()); this.logger.debug("Rank Matrix: {}", rankMatrix); // Initialize PipelineCharacterizer with list of distinct pipelines this.logger.info("WEKAMetaminer: Initializing pipeline characterizer."); this.pipelineCharacterizer.build(distinctPipelines); // Get Characterization of base pipelines from PipelineCharacterizer (Matrix W) INDArray pipelinesMetafeatures = Nd4j.create(this.pipelineCharacterizer.getCharacterizationsOfTrainingExamples()); this.logger.debug("WEKAMetaminer: Pipeline Metafeatures: {} x {}", pipelinesMetafeatures.rows(), pipelinesMetafeatures.columns()); // Initialize HeterogenousSimilarityMeasures this.logger.info("WEKAMetaminer: Create similarity measure."); this.similarityMeasure.build(datasetsMetafeatures, pipelinesMetafeatures, rankMatrix); // Building is finished this.hasBeenBuilt = true; } public void setDataSetCharacterization(final Map<String, Double> datasetCharacterization) { // Characterize the given data set with characterizer (set x) this.datasetMetafeatures = Nd4j.create(datasetCharacterization.size()); List<Attribute> attributes = Collections.list(this.dataSetMetaFeaturesAttributes); for (int i = 0; i < attributes.size(); i++) { this.datasetMetafeatures.putScalar(i, datasetCharacterization.get(attributes.get(i).name())); } } /** * Get the similarity measure used to compute the similarity between the dataset characterization and the pipeline characterizations. * * @return the similarity measure */ public IHeterogenousSimilarityMeasureComputer getSimilarityMeasure() { return this.similarityMeasure; } public void setSimilarityMeasure(final IHeterogenousSimilarityMeasureComputer similarityMeasure) { this.similarityMeasure = similarityMeasure; } public IPipelineCharacterizer getPipelineCharacterizer() { return this.pipelineCharacterizer; } public void setPipelineCharacterizer(final IPipelineCharacterizer pipelineCharacterizer) { this.pipelineCharacterizer = pipelineCharacterizer; } }
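/*
 * Editor-added sketch of the WEKAMetaminer lifecycle: build() fits the similarity measure on
 * past experiment data, setDataSetCharacterization() fixes the current dataset's meta-features,
 * and only then may score() be called on candidate pipelines. All inputs below are assumed to
 * be prepared elsewhere (e.g. via ExperimentRepository); the helper class name is hypothetical.
 */
class WEKAMetaminerLifecycleSketch {
	static double scoreCandidate(final WEKAMetaminer miner,
			final java.util.List<ai.libs.jaicore.components.model.ComponentInstance> pastPipelines,
			final weka.core.Instances pastDatasetMetaFeatures,
			final double[][][] pastPerformances,
			final java.util.Map<String, Double> currentDatasetCharacterization,
			final ai.libs.jaicore.components.model.ComponentInstance candidate) throws Exception {
		miner.build(pastPipelines, pastDatasetMetaFeatures, pastPerformances); // fit the similarity measure once
		miner.setDataSetCharacterization(currentDatasetCharacterization);     // fix x for the current dataset
		return miner.score(candidate);                                        // similarity of candidate to the dataset
	}
}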
0
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan/metamining
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan/metamining/databaseconnection/ComponentInstanceDatabaseGetter.java
package ai.libs.mlplan.metamining.databaseconnection; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import org.api4.java.datastructure.kvstore.IKVStore; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ai.libs.jaicore.components.exceptions.ComponentNotFoundException; import ai.libs.jaicore.components.model.ComponentInstance; import ai.libs.jaicore.db.IDatabaseAdapter; import ai.libs.jaicore.logging.LoggerUtil; import ai.libs.jaicore.ml.weka.classification.pipeline.MLPipeline; import ai.libs.mlplan.weka.weka.MLPipelineComponentInstanceFactory; import weka.attributeSelection.ASEvaluation; import weka.attributeSelection.ASSearch; import weka.classifiers.AbstractClassifier; /** * A worker that gets a range of rows from a database with entries containing * String representations of MLPipelines. These representations are converted * back to ComponentInstances by the thread. Currently only pipelines that * exclusively contain elements from the autoweka-all configuration can be * parsed. All the setters have to be used before the thread is run. * * @author Helena Graf * */ public class ComponentInstanceDatabaseGetter extends Thread { private Logger logger = LoggerFactory.getLogger(ComponentInstanceDatabaseGetter.class); private List<ComponentInstance> pipelines; private List<HashMap<String, List<Double>>> pipelinePerformances; private int offset; private int limit; private IDatabaseAdapter adapter; private MLPipelineComponentInstanceFactory factory; private boolean finishedSuccessfully = false; @Override public void run() { String query = "SELECT searcher, evaluator, classifier, GROUP_CONCAT( CONCAT (dataset_id, ':', dataset_origin, ',', error_rate) SEPARATOR ';') AS results FROM basePipelineEvals GROUP BY searcher, evaluator, classifier ORDER BY searcher, evaluator, classifier LIMIT " + this.limit + " OFFSET " + this.offset; try { this.pipelines = new ArrayList<>(this.limit); this.pipelinePerformances = new ArrayList<>(this.limit); List<IKVStore> resultSet = this.adapter.getResultsOfQuery(query); this.logger.debug("ComponentInstanceDatabaseGetter: Thread {} got pipelines from the database.", this.getId()); for (IKVStore store : resultSet) { this.next(store); } } catch (Exception e1) { this.logger.error("Thread {} could not finish getting all pipelines. 
Cause: {}", this.getId(), e1.getMessage()); return; } this.finishedSuccessfully = true; } private void next(final IKVStore resultSet) throws Exception { try { // Get pipeline ComponentInstance ci; if (resultSet.getAsString("searcher") != null && resultSet.getAsString("evaluator") != null) { ci = this.factory.convertToComponentInstance( new MLPipeline(ASSearch.forName(resultSet.getAsString("searcher"), null), ASEvaluation.forName(resultSet.getAsString("evaluator"), null), AbstractClassifier.forName(resultSet.getAsString("classifier"), null))); } else { ci = this.factory.convertToComponentInstance(new MLPipeline(new ArrayList<>(), AbstractClassifier.forName(resultSet.getAsString("classifier"), null))); } // Get pipeline performance values (errorRate,dataset array) String[] results = resultSet.getAsString("results").split(";"); HashMap<String, List<Double>> datasetPerformances = new HashMap<>(); for (int j = 0; j < results.length; j++) { String[] errorRatePerformance = results[j].split(","); if (!datasetPerformances.containsKey(errorRatePerformance[0])) { datasetPerformances.put(errorRatePerformance[0], new ArrayList<>()); } if (errorRatePerformance.length > 1) { datasetPerformances.get(errorRatePerformance[0]).add(Double.parseDouble(errorRatePerformance[1])); } } this.pipelines.add(ci); this.pipelinePerformances.add(datasetPerformances); } catch (ComponentNotFoundException e) { // Could not convert pipeline - component not in loaded configuration this.logger.warn("Could not convert component due to {}", LoggerUtil.getExceptionInfo(e)); } } /** * Set the row of the table at which this thread should start. * * @param offset * The offset */ public void setOffset(final int offset) { this.offset = offset; } /** * Set the limit of how many rows this thread shall get. * * @param limit * The limit */ public void setLimit(final int limit) { this.limit = limit; } /** * Set the adapter this thread uses to get the data from the data base. It has * to have an open connection. * * @param adapter * The used adapter */ public void setAdapter(final IDatabaseAdapter adapter) { this.adapter = adapter; } /** * Set the factory used to convert the MLPipelines instantiated from the String * representation in the database to ComponentInstances. * * @param factory * The converter factory */ public void setFactory(final MLPipelineComponentInstanceFactory factory) { this.factory = factory; } /** * Get the converted pipelines the thread collected from the data base. * * @return The list of converted pipelines */ public List<ComponentInstance> getPipelines() { return this.pipelines; } /** * Get the performances of the pipelines on the database for which they are * values present. * * @return A list of mappings of data set ids to a list of performance values in * the same order as the returned list of pipelines */ public List<HashMap<String, List<Double>>> getPipelinePerformances() { return this.pipelinePerformances; } /** * Find out whether the thread finished successfully or aborted with an error. * * @return Whether the execution of the thread was successful */ public boolean isFinishedSuccessfully() { return this.finishedSuccessfully; } }
0
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan/metamining
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan/metamining/databaseconnection/ExperimentRepository.java
package ai.libs.mlplan.metamining.databaseconnection; import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import org.api4.java.datastructure.kvstore.IKVStore; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ai.libs.jaicore.components.model.ComponentInstance; import ai.libs.jaicore.db.IDatabaseAdapter; import ai.libs.mlplan.weka.weka.MLPipelineComponentInstanceFactory; import dataHandling.mySQL.MetaDataDataBaseConnection; import weka.core.Instances; /** * Manages a connection to experiment data of pipelines on datasets in a database. * * @author Helena Graf * */ public class ExperimentRepository { private Logger logger = LoggerFactory.getLogger(ExperimentRepository.class); private IDatabaseAdapter adapter; private String host; private String user; private String password; private MLPipelineComponentInstanceFactory factory; private int cpus; private String metaFeatureSetName; private String datasetSetName; private Integer limit; private List<HashMap<String, List<Double>>> pipelinePerformances = new ArrayList<>(); private MetaDataDataBaseConnection metaDataBaseConnection; public ExperimentRepository(final String host, final String user, final String password, final MLPipelineComponentInstanceFactory factory, final int cpus, final String metaFeatureSetName, final String datasetSetName) { this.user = user; this.password = password; this.host = host; this.factory = factory; this.cpus = cpus; this.metaDataBaseConnection = new MetaDataDataBaseConnection(host, user, password, "hgraf"); this.metaFeatureSetName = metaFeatureSetName; this.datasetSetName = datasetSetName; } public List<ComponentInstance> getDistinctPipelines() throws SQLException, InterruptedException { this.connect(); String query = "SELECT (COUNT(DISTINCT searcher, evaluator, classifier)) FROM basePipelineEvals"; List<IKVStore> resultSet = this.adapter.getResultsOfQuery(query); int distinctPipelineCount = resultSet.get(0).getAsInt("(COUNT(DISTINCT searcher, evaluator, classifier))"); distinctPipelineCount = this.limit == null ? distinctPipelineCount : this.limit; this.logger.info("Getting {} distinct pipelines.", distinctPipelineCount); int chunkSize = Math.floorDiv(distinctPipelineCount, this.cpus); int lastchunkSize = distinctPipelineCount - (chunkSize * (this.cpus - 1)); this.logger.debug("ExperimentRepository: Allocate Getter-Threads."); ComponentInstanceDatabaseGetter[] threads = new ComponentInstanceDatabaseGetter[this.cpus]; for (int i = 0; i < threads.length; i++) { threads[i] = new ComponentInstanceDatabaseGetter(); threads[i].setAdapter(this.adapter); threads[i].setOffset(i * chunkSize); threads[i].setLimit(i == (threads.length - 1) ? 
lastchunkSize : chunkSize); threads[i].setFactory(this.factory); threads[i].start(); } List<ComponentInstance> pipelines = new ArrayList<>(); for (int i = 0; i < threads.length; i++) { threads[i].join(); pipelines.addAll(threads[i].getPipelines()); this.pipelinePerformances.addAll(threads[i].getPipelinePerformances()); } boolean allSuccessful = true; for (int i = 0; i < threads.length; i++) { this.logger.debug("Thread {} finished successfully: {}", threads[i].getId(), threads[i].isFinishedSuccessfully()); if (!threads[i].isFinishedSuccessfully()) { allSuccessful = false; } } if (!allSuccessful) { this.logger.error("Not all threads finished the download successfully!"); } else { this.logger.info("All threads finished successfully."); } this.disconnect(); return pipelines; } public Instances getDatasetCahracterizations() throws SQLException { // get data set characterizations this.logger.info("Downloading dataset characterizations."); Instances metaData = this.metaDataBaseConnection.getMetaDataSetForDataSetSet(this.datasetSetName, this.metaFeatureSetName); metaData.deleteAttributeAt(0); return metaData; } /** * Gets all the pipeline results for the distinct pipelines from * {@link #getDistinctPipelines()}, thus has to be called after that method. * * @return The results of pipelines on datasets: rows: data sets, columns: * pipelines, entries: array of results of pipeline on data set * @throws SQLException * If something goes wrong while connecting to the database */ public double[][][] getPipelineResultsOnDatasets() throws SQLException { this.logger.info("Downloading pipeline results for datasets."); // Get order of datasets List<String> datasets = this.metaDataBaseConnection.getMembersOfDatasetSet(this.datasetSetName); // Organize results into matrix double[][][] results = new double[datasets.size()][this.pipelinePerformances.size()][]; for (int j = 0; j < datasets.size(); j++) { String dataset = datasets.get(j); for (int i = 0; i < this.pipelinePerformances.size(); i++) { // Does the pipeline have a result for the dataset? List<Double> datasetResults = this.pipelinePerformances.get(i).get(dataset); if (datasetResults != null && !datasetResults.isEmpty()) { results[j][i] = datasetResults.stream().mapToDouble(value -> value).toArray(); } } } return results; } private void connect() { throw new UnsupportedOperationException(); } private void disconnect() { this.adapter.close(); } public void setLimit(final Integer limit) { this.limit = limit; } }
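/*
 * Editor-added note on the shape returned by getPipelineResultsOnDatasets(): the first index
 * is the dataset, the second the pipeline, and the innermost array holds all observed error
 * rates (null when a pipeline was never evaluated on a dataset). The helper below only
 * illustrates that layout; its class name is hypothetical.
 */
class ResultMatrixSketch {
	static void printMeanErrorRates(final double[][][] results) {
		for (int dataset = 0; dataset < results.length; dataset++) {
			for (int pipeline = 0; pipeline < results[dataset].length; pipeline++) {
				double[] errorRates = results[dataset][pipeline]; // null = no observation
				if (errorRates != null) {
					System.out.printf("dataset %d, pipeline %d: mean error %.4f%n", dataset, pipeline,
							java.util.Arrays.stream(errorRates).average().orElse(Double.NaN));
				}
			}
		}
	}
}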
0
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan/metamining
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan/metamining/dyadranking/DyadRankingBasedNodeEvaluator.java
package ai.libs.mlplan.metamining.dyadranking; import java.io.FileInputStream; import java.io.IOException; import java.io.ObjectInputStream; import java.nio.file.Paths; import java.time.Duration; import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Random; import java.util.concurrent.CompletionService; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; import java.util.concurrent.ExecutorCompletionService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.stream.Collectors; import org.aeonbits.owner.ConfigFactory; import org.apache.commons.collections.BidiMap; import org.apache.commons.collections.bidimap.DualHashBidiMap; import org.api4.java.ai.graphsearch.problem.implicit.graphgenerator.IPathGoalTester; import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.IPathEvaluator; import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.IPotentiallyGraphDependentPathEvaluator; import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.IPotentiallySolutionReportingPathEvaluator; import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.PathEvaluationException; import org.api4.java.ai.ml.classification.IClassifier; import org.api4.java.ai.ml.core.dataset.splitter.SplitFailedException; import org.api4.java.ai.ml.core.exception.PredictionException; import org.api4.java.ai.ml.ranking.IRanking; import org.api4.java.ai.ml.ranking.dyad.dataset.IDyad; import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingInstance; import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException; import org.api4.java.common.attributedobjects.IObjectEvaluator; import org.api4.java.common.math.IVector; import org.api4.java.datastructure.graph.ILabeledPath; import org.api4.java.datastructure.graph.implicit.IGraphGenerator; import org.openml.webapplication.fantail.dc.LandmarkerCharacterizer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ai.libs.hasco.core.HASCOUtil; import ai.libs.jaicore.basic.algorithm.AlgorithmInitializedEvent; import ai.libs.jaicore.basic.sets.Pair; import ai.libs.jaicore.components.api.IComponent; import ai.libs.jaicore.components.api.IComponentRepository; import ai.libs.jaicore.components.model.ComponentInstance; import ai.libs.jaicore.math.linearalgebra.DenseDoubleVector; import ai.libs.jaicore.ml.classification.loss.dataset.EClassificationPerformanceMeasure; import ai.libs.jaicore.ml.core.evaluation.evaluator.FixedSplitClassifierEvaluator; import ai.libs.jaicore.ml.ranking.dyad.dataset.DyadRankingDataset; import ai.libs.jaicore.ml.ranking.dyad.dataset.SparseDyadRankingInstance; import ai.libs.jaicore.ml.ranking.dyad.learner.algorithm.PLNetDyadRanker; import ai.libs.jaicore.ml.ranking.dyad.learner.util.DyadMinMaxScaler; import ai.libs.jaicore.ml.weka.WekaUtil; import ai.libs.jaicore.ml.weka.dataset.WekaInstances; import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode; import ai.libs.jaicore.search.algorithms.standard.bestfirst.events.EvaluatedSearchSolutionCandidateFoundEvent; import ai.libs.jaicore.search.algorithms.standard.bestfirst.events.FValueEvent; import ai.libs.jaicore.search.algorithms.standard.bestfirst.nodeevaluation.RandomCompletionBasedNodeEvaluator; 
import ai.libs.jaicore.search.algorithms.standard.bestfirst.nodeevaluation.RandomizedDepthFirstNodeEvaluator; import ai.libs.jaicore.search.algorithms.standard.gbf.SolutionEventBus; import ai.libs.jaicore.search.algorithms.standard.random.RandomSearch; import ai.libs.jaicore.search.model.other.EvaluatedSearchGraphPath; import ai.libs.jaicore.search.model.other.SearchGraphPath; import ai.libs.jaicore.search.probleminputs.GraphSearchWithSubpathEvaluationsInput; import ai.libs.mlplan.core.ILearnerFactory; import ai.libs.mlplan.metamining.pipelinecharacterizing.ComponentInstanceVectorFeatureGenerator; import ai.libs.mlplan.metamining.pipelinecharacterizing.IPipelineCharacterizer; import weka.core.Instances; /** * This NodeEvaluator can calculate the f-value for nodes using dyad ranking. * To this end, a large number of random completions is drawn for a node; these * completions are then ranked using dyad ranking, and finally the top k * pipelines are evaluated, using the best observed score as the f-value of * the node. * * @param <T> * the node type, typically it is {@link TFDNode} * @param <V> * the type of the score * @author Mirko Juergens * */ public class DyadRankingBasedNodeEvaluator<T, A, V extends Comparable<V>> implements IPotentiallyGraphDependentPathEvaluator<T, A, V>, IPotentiallySolutionReportingPathEvaluator<T, A, V> { private static final Logger logger = LoggerFactory.getLogger(DyadRankingBasedNodeEvaluator.class); /* Key is a path (hence, List<T>), value is a ComponentInstance */ private BidiMap pathToPipelines = new DualHashBidiMap(); /* Used to draw random completions for nodes that are not in the final state */ private RandomSearch<T, A> randomPathCompleter; /* The evaluator that can be used to get the performance of the paths */ private IObjectEvaluator<ComponentInstance, V> pipelineEvaluator; /* Specifies the components of this MLPlan run. */ private Collection<IComponent> components; /* * Specifies the number of paths that are randomly completed for the computation * of the f-value */ private final int randomlyCompletedPaths; /* The dataset of this MLPlan run. */ private Instances evaluationDataset; /* * X in the paper, these are usually derived using landmarking algorithms on the * dataset */ private double[] datasetMetaFeatures; /* * Specifies the number of paths that will be evaluated after ranking the paths */ private final int evaluatedPaths; /* The Random instance used to randomly complete the paths */ private final Random random; /* The ranker to use for dyad ranking */ private PLNetDyadRanker dyadRanker = new PLNetDyadRanker(); /* The characterizer to use to derive meta features for pipelines */ private IPipelineCharacterizer characterizer; /* Only used if useLandmarkers is set to true */ /* * Defines how many evaluations for each of the landmarkers are performed, to * reduce variance */ private final int landmarkerSampleSize; /* Only used if useLandmarkers is set to true */ /* Defines the size of the different landmarkers */ private final int[] landmarkers; /* Only used if useLandmarkers is set to true */ /* * The concrete landmarker instances, this array has dimension landmarkers.length * x landmarkerSampleSize */ private Instances[][] landmarkerSets; /* Only used if useLandmarkers is set to true */ /* * Used to create landmarker values for pipelines where no such landmarker has * yet been evaluated. */ private ILearnerFactory<IClassifier> classifierFactory; /* * Defines if a landmarking based approach is used for defining the meta * features of the algorithm. 
*/ private boolean useLandmarkers; /* * Used to derive the time until a certain solution has been found, useful for * evaluations */ private Instant firstEvaluation = null; private SolutionEventBus<T> eventBus; private IGraphGenerator<T, A> graphGenerator; private IPathGoalTester<T, A> goalTester; private DyadMinMaxScaler scaler = null; public void setClassifierFactory(final ILearnerFactory<IClassifier> classifierFactory) { this.classifierFactory = classifierFactory; } public DyadRankingBasedNodeEvaluator(final IComponentRepository repository) { this(repository, ConfigFactory.create(DyadRankingBasedNodeEvaluatorConfig.class)); } public DyadRankingBasedNodeEvaluator(final IComponentRepository repository, final DyadRankingBasedNodeEvaluatorConfig config) { this.eventBus = new SolutionEventBus<>(); this.components = repository; this.random = new Random(config.getSeed()); this.evaluatedPaths = config.getNumberOfEvaluations(); this.randomlyCompletedPaths = config.getNumberOfRandomSamples(); logger.debug("Initialized DyadRankingBasedNodeEvaluator with evalNum: {} and completionNum: {}", this.evaluatedPaths, this.randomlyCompletedPaths); this.characterizer = new ComponentInstanceVectorFeatureGenerator(repository); this.landmarkers = config.getLandmarkers(); this.landmarkerSampleSize = config.getLandmarkerSampleSize(); this.useLandmarkers = config.useLandmarkers(); String scalerPath = config.scalerPath(); try { this.dyadRanker.loadModelFromFile(Paths.get(config.getPlNetPath()).toString()); } catch (IOException e) { logger.error("Could not load model for plnet in {}", Paths.get(config.getPlNetPath())); } // load the scaler from the config try (ObjectInputStream oin = new ObjectInputStream(new FileInputStream(Paths.get(scalerPath).toFile()))) { this.scaler = (DyadMinMaxScaler) oin.readObject(); } catch (IOException e) { logger.error("Could not load scaler for plnet in {}", Paths.get(config.scalerPath())); } catch (ClassNotFoundException e) { logger.error("Could not read scaler.", e); } } @SuppressWarnings("unchecked") @Override public V evaluate(final ILabeledPath<T, A> path) throws InterruptedException, PathEvaluationException { if (this.firstEvaluation == null) { this.firstEvaluation = Instant.now(); } /* Let the random completer handle this use-case. 
*/ if (this.randomPathCompleter.getInput().getGoalTester().isGoal(path)) { return null; } /* Time measuring */ Instant startOfEvaluation = Instant.now(); /* Make sure that the completer knows the path until this node */ if (!this.randomPathCompleter.knowsNode(path.getHead())) { synchronized (this.randomPathCompleter) { this.randomPathCompleter.appendPathToNode(path); } } // draw N paths at random List<List<T>> randomPaths = null; try { randomPaths = this.getNRandomPaths(path); } catch (InterruptedException | TimeoutException e) { logger.error("Interrupted in path completion!"); Thread.currentThread().interrupt(); throw new InterruptedException(); } // order them according to dyad ranking List<ComponentInstance> allRankedPaths; try { allRankedPaths = this.getDyadRankedPaths(randomPaths); } catch (PredictionException e1) { throw new PathEvaluationException("Could not rank nodes", e1); } // random search failed to find anything here if (allRankedPaths.isEmpty()) { return (V) ((Double) 9000.0d); } // get the top k paths List<ComponentInstance> topKRankedPaths = allRankedPaths.subList(0, Math.min(this.evaluatedPaths, allRankedPaths.size())); // evaluate the top k paths List<Pair<ComponentInstance, V>> allEvaluatedPaths = null; try { allEvaluatedPaths = this.evaluateTopKPaths(topKRankedPaths); } catch (InterruptedException | TimeoutException e) { logger.error("Interrupted while predicting next best solution"); Thread.currentThread().interrupt(); throw new InterruptedException(); } catch (ExecutionException e2) { logger.error("Couldn't evaluate solution candidates. Returning null as f-value!"); return null; } Duration evaluationTime = Duration.between(startOfEvaluation, Instant.now()); logger.info("Evaluation took {}ms", evaluationTime.toMillis()); V bestSolution = this.getBestSolution(allEvaluatedPaths); logger.info("Best solution is {}, {}", bestSolution, allEvaluatedPaths.stream().map(Pair::getY).collect(Collectors.toList())); if (bestSolution == null) { return (V) ((Double) 9000.0d); } this.eventBus.post(new FValueEvent<V>(null, bestSolution, evaluationTime.toMillis())); return bestSolution; } /** * Adapted from {@link RandomCompletionBasedNodeEvaluator}; this could be refactored * into a shared pattern. * * @param node * the starting node * @return the randomPaths, described by their final node * @throws InterruptedException * @throws TimeoutException */ private List<List<T>> getNRandomPaths(final ILabeledPath<T, A> node) throws InterruptedException, TimeoutException { List<List<T>> completedPaths = new ArrayList<>(); for (int currentPath = 0; currentPath < this.randomlyCompletedPaths; currentPath++) { /* * complete the current path by the dfs-solution; we assume that this goes in * almost constant time */ List<T> pathCompletion = null; List<T> completedPath = null; synchronized (this.randomPathCompleter) { if (this.randomPathCompleter.isCanceled()) { logger.info("Completer has been canceled (perhaps due to a cancel on the evaluator). Canceling RDFS"); break; } completedPath = new ArrayList<>(node.getNodes()); SearchGraphPath<T, A> solutionPathFromN = null; try { solutionPathFromN = this.randomPathCompleter.nextSolutionUnderSubPath(node); } catch (AlgorithmExecutionCanceledException e) { logger.info("Completer has been canceled. 
Returning control."); break; } if (solutionPathFromN == null) { logger.info("No completion was found for path {}.", node.getNodes()); break; } pathCompletion = new ArrayList<>(solutionPathFromN.getNodes()); pathCompletion.remove(0); completedPath.addAll(pathCompletion); } completedPaths.add(completedPath); } logger.info("Returning {} paths", completedPaths.size()); return completedPaths; } private List<ComponentInstance> getDyadRankedPaths(final List<List<T>> randomPaths) throws PredictionException, InterruptedException { Map<IVector, ComponentInstance> pipelineToCharacterization = new HashMap<>(); // extract componentInstances that we can rank for (List<T> randomPath : randomPaths) { TFDNode goalNode = (TFDNode) randomPath.get(randomPath.size() - 1); ComponentInstance cI = HASCOUtil.getSolutionCompositionFromState(this.components, goalNode.getState(), true); this.pathToPipelines.put(randomPath, cI); // fill the y with landmarkers if (this.useLandmarkers) { IVector yPrime = this.evaluateLandmarkersForAlgorithm(cI); pipelineToCharacterization.put(yPrime, cI); } else { IVector y = new DenseDoubleVector(this.characterizer.characterize(cI)); if (this.scaler != null) { List<IDyadRankingInstance> asList = Arrays.asList(new SparseDyadRankingInstance(new DenseDoubleVector(this.datasetMetaFeatures), Arrays.asList(y))); DyadRankingDataset dataset = new DyadRankingDataset(asList); this.scaler.transformAlternatives(dataset); } pipelineToCharacterization.put(y, cI); } } // invoke dyad ranker return this.rankRandomPipelines(pipelineToCharacterization); } /** * Calculates the landmarkers for the given Pipeline, if the value * {@link DyadRankingBasedNodeEvaluator#useLandmarkers} is set to * <code>true</code>. * * @param y * the pipeline characterization * @param cI * the pipeline to characterize * @return the meta features of the pipeline with appended landmarking features * @throws InterruptedException */ private IVector evaluateLandmarkersForAlgorithm(final ComponentInstance cI) throws InterruptedException { double[] y = this.characterizer.characterize(cI); int sizeOfYPrime = this.characterizer.getLengthOfCharacterization() + this.landmarkers.length; double[] yPrime = new double[sizeOfYPrime]; System.arraycopy(y, 0, yPrime, 0, y.length); for (int i = 0; i < this.landmarkers.length; i++) { Instances[] subsets = this.landmarkerSets[i]; double score = 0d; for (Instances train : subsets) { FixedSplitClassifierEvaluator evaluator = new FixedSplitClassifierEvaluator(new WekaInstances(train), new WekaInstances(this.evaluationDataset), EClassificationPerformanceMeasure.ERRORRATE); try { score += evaluator.evaluate(this.classifierFactory.getComponentInstantiation(cI)); } catch (InterruptedException e) { throw e; } catch (Exception e) { logger.error("Couldn't get classifier for {}", cI); } } // average the score if (score != 0) { score = score / subsets.length; } yPrime[y.length + i] = score; } return new DenseDoubleVector(yPrime); } private List<ComponentInstance> rankRandomPipelines(final Map<IVector, ComponentInstance> randomPipelines) throws PredictionException, InterruptedException { List<IVector> alternatives = new ArrayList<>(randomPipelines.keySet()); /* Use a sparse instance for ranking */ SparseDyadRankingInstance toRank = new SparseDyadRankingInstance(new DenseDoubleVector(this.datasetMetaFeatures), alternatives); IRanking<IDyad> rankedInstance; rankedInstance = this.dyadRanker.predict(toRank); List<ComponentInstance> rankedPipelines = new ArrayList<>(); for (IDyad dyad : rankedInstance) { 
rankedPipelines.add(randomPipelines.get(dyad.getAlternative())); } return rankedPipelines; } /** * Invokes the solution-evaluator to get the performances of the best k paths. * * @param topKRankedPaths * the paths, after ranking * @return the list of scores. * @throws InterruptedException * @throws ExecutionException * @throws TimeoutException */ private List<Pair<ComponentInstance, V>> evaluateTopKPaths(final List<ComponentInstance> topKRankedPaths) throws InterruptedException, ExecutionException, TimeoutException { // we use the completion-service mechanism to make sure we wait at most 20 seconds // for an evaluation java.util.concurrent.ExecutorService executor = Executors.newFixedThreadPool(1); CompletionService<Pair<ComponentInstance, V>> completionService = new ExecutorCompletionService<>(executor); List<Pair<ComponentInstance, V>> evaluatedSolutions = new ArrayList<>(); // schedule the tasks for (ComponentInstance node : topKRankedPaths) { completionService.submit(() -> { try { Instant startTime = Instant.now(); V score = this.pipelineEvaluator.evaluate(node); Duration evalTime = Duration.between(startTime, Instant.now()); this.postSolution(node, evalTime.toMillis(), score); return new Pair<>(node, score); } catch (Exception e) { logger.error("Couldn't evaluate {}", node); if (e instanceof InterruptedException) { Thread.currentThread().interrupt(); } return null; } }); } // collect the results but do not wait longer than 20 seconds for a result to appear for (int i = 0; i < topKRankedPaths.size(); i++) { logger.info("Got {} solutions. Waiting for iteration {} of max iterations {}", evaluatedSolutions.size(), i + 1, topKRankedPaths.size()); Future<Pair<ComponentInstance, V>> evaluatedPipe = completionService.poll(20, TimeUnit.SECONDS); if (evaluatedPipe == null) { logger.info("Didn't receive any futures (expected {} futures)", topKRankedPaths.size()); continue; } try { Pair<ComponentInstance, V> solution = evaluatedPipe.get(20, TimeUnit.SECONDS); if (solution != null) { logger.info("Evaluation was successful. Adding {} to solutions", solution.getY()); evaluatedSolutions.add(solution); } else { logger.info("No solution was found while waiting up to 20s."); evaluatedPipe.cancel(true); } } catch (InterruptedException e) { throw e; } catch (Exception e) { logger.info("Got exception while evaluating {}", e.getMessage()); } } executor.shutdownNow(); // release the worker thread; otherwise every call leaks a thread pool return evaluatedSolutions; } /** * Aggregates a list of found solutions to an f-value. Currently, this is the * minimal value found. * * @param allEvaluatedPaths * all evaluated solutions * @return the best (minimal) observed score, or null if there is none */ private V getBestSolution(final List<Pair<ComponentInstance, V>> allEvaluatedPaths) { return allEvaluatedPaths.stream().map(Pair::getY).min(V::compareTo).orElse(null); } @Override public void setGenerator(final IGraphGenerator<T, A> generator, final IPathGoalTester<T, A> goalTester) { this.graphGenerator = generator; this.goalTester = goalTester; this.initializeRandomSearch(); } /** * Can be used to reinitialize the random search at every call of the f-value * computation. 
*/ private void initializeRandomSearch() { IPathEvaluator<T, A, Double> nodeEvaluator = new RandomizedDepthFirstNodeEvaluator<>(this.random); GraphSearchWithSubpathEvaluationsInput<T, A, Double> completionProblem = new GraphSearchWithSubpathEvaluationsInput<>(this.graphGenerator, this.goalTester, nodeEvaluator); this.randomPathCompleter = new RandomSearch<>(completionProblem, null, this.random); while (!(this.randomPathCompleter.next() instanceof AlgorithmInitializedEvent)) { /* do not do anything, just skip until InitializationEvent is observed */ } } /** * Sets the data set in the node evaluator and calculates its meta features. * * @param dataset * the dataset to characterize */ public void setDataset(final Instances dataset) { try { if (this.useLandmarkers) { List<Instances> split = WekaUtil.getStratifiedSplit(dataset, 42L, 0.8); Instances trainData = split.get(0); this.evaluationDataset = split.get(1); Map<String, Double> metaFeatures = new LandmarkerCharacterizer().characterize(dataset); this.datasetMetaFeatures = metaFeatures.entrySet().stream().mapToDouble(Map.Entry::getValue).toArray(); this.setUpLandmarkingDatasets(dataset, trainData); } else { Map<String, Double> metaFeatures = new LandmarkerCharacterizer().characterize(dataset); this.datasetMetaFeatures = metaFeatures.entrySet().stream().mapToDouble(Map.Entry::getValue).toArray(); } } catch (SplitFailedException e) { throw new IllegalArgumentException(e); } catch (InterruptedException e) { Thread.currentThread().interrupt(); } catch (Exception e) { logger.error("Failed to characterize the dataset", e); } } /** * Sets up the training data for the landmarkers that should be used. */ private void setUpLandmarkingDatasets(final Instances dataset, final Instances trainData) { this.landmarkerSets = new Instances[this.landmarkers.length][this.landmarkerSampleSize]; // draw instances used for the landmarkers for (int i = 0; i < this.landmarkers.length; i++) { int landmarker = this.landmarkers[i]; for (int j = 0; j < this.landmarkerSampleSize; j++) { Instances instances = new Instances(dataset, landmarker); for (int k = 0; k < landmarker; k++) { int randomEntry = this.random.nextInt(trainData.size()); instances.add(trainData.get(randomEntry)); } this.landmarkerSets[i][j] = instances; } } } /** * Posts the solution to the EventBus of the search. 
* * @param solution * evaluated pipeline * @param time * time it took * @param score * the observed score */ protected void postSolution(final ComponentInstance solution, final long time, final V score) { try { @SuppressWarnings("unchecked") List<T> pathToSolution = (List<T>) this.pathToPipelines.getKey(solution); EvaluatedSearchGraphPath<T, ?, V> solutionObject = new EvaluatedSearchGraphPath<>(pathToSolution, null, score); solutionObject.setAnnotation("fTime", time); solutionObject.setAnnotation("timeToSolution", Duration.between(this.firstEvaluation, Instant.now()).toMillis()); solutionObject.setAnnotation("nodesEvaluatedToSolution", this.randomlyCompletedPaths); logger.debug("Posting solution {}", solutionObject); this.eventBus.post(new EvaluatedSearchSolutionCandidateFoundEvent<>(null, solutionObject)); } catch (Exception e) { logger.error("Couldn't post solution to event bus.", e); } } public void setPipelineEvaluator(final IObjectEvaluator<ComponentInstance, V> wrappedSearchBenchmark) { this.pipelineEvaluator = wrappedSearchBenchmark; } @Override public boolean requiresGraphGenerator() { return true; } @Override public void registerSolutionListener(final Object listener) { this.eventBus.register(listener); } @Override public boolean reportsSolutions() { return true; } }
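For orientation, the poll-with-timeout pattern that evaluateTopKPaths uses can be reduced to the following self-contained sketch. The String payloads, the task count, and the class name are illustrative assumptions; the original code submits pipeline evaluations and collects Pair<ComponentInstance, V> results instead. Unlike the original method, the sketch also shuts its executor down when done.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

public class CompletionServiceSketch {
	public static void main(final String[] args) throws InterruptedException {
		ExecutorService executor = Executors.newFixedThreadPool(1);
		CompletionService<String> completionService = new ExecutorCompletionService<>(executor);
		int n = 3;
		for (int i = 0; i < n; i++) {
			final int task = i;
			completionService.submit(() -> "result-" + task); // stands in for pipelineEvaluator.evaluate(...)
		}
		List<String> results = new ArrayList<>();
		for (int i = 0; i < n; i++) {
			// null means no task finished within the budget; the original loop then simply moves on
			Future<String> next = completionService.poll(20, TimeUnit.SECONDS);
			if (next != null) {
				try {
					results.add(next.get());
				} catch (Exception e) {
					// a failed evaluation is logged and skipped in the original code
				}
			}
		}
		executor.shutdownNow(); // release the worker thread
		System.out.println(results);
	}
}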
0
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan/metamining
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan/metamining/dyadranking/WEKADyadRankedNodeQueue.java
package ai.libs.mlplan.metamining.dyadranking; import java.util.ArrayList; import java.util.Collection; import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.IEvaluatedPath; import org.api4.java.common.math.IVector; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ai.libs.hasco.core.HASCOUtil; import ai.libs.jaicore.components.api.IComponent; import ai.libs.jaicore.components.model.ComponentInstance; import ai.libs.jaicore.math.linearalgebra.DenseDoubleVector; import ai.libs.jaicore.ml.ranking.dyad.learner.algorithm.IDyadRanker; import ai.libs.jaicore.ml.ranking.dyad.learner.search.ADyadRankedNodeQueue; import ai.libs.jaicore.ml.ranking.dyad.learner.util.AbstractDyadScaler; import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode; import ai.libs.mlplan.metamining.pipelinecharacterizing.IPipelineCharacterizer; /** * A queue that uses a dyad ranker to rank WEKA pipelines. * * @author Helena Graf * */ public class WEKADyadRankedNodeQueue extends ADyadRankedNodeQueue<TFDNode, Double> { private Logger logger = LoggerFactory.getLogger(WEKADyadRankedNodeQueue.class); /** * the allowed components of the pipelines */ private Collection<IComponent> components; /** * the characterizer for characterizing (partial) pipelines */ private IPipelineCharacterizer characterizer; /** * Construct a new WEKA dyad ranked node queue that ranks WEKA pipelines * constructed from the given components in the given context. * * @param contextCharacterization * the characterization of the dataset (the context) * @param components * the search space components * @param ranker * the ranker to use to rank the dyads - must be pre-trained * @param scaler * the scaler to use to scale the dataset - must have been fit to * data already */ public WEKADyadRankedNodeQueue(final IVector contextCharacterization, final Collection<? extends IComponent> components, final IDyadRanker ranker, final AbstractDyadScaler scaler, final IPipelineCharacterizer characterizer) { super(contextCharacterization, ranker, scaler); this.components = new ArrayList<>(components); this.characterizer = characterizer; } @Override protected IVector characterize(final IEvaluatedPath<TFDNode, ?, Double> path) { ComponentInstance cI = HASCOUtil.getComponentInstanceFromState(this.components, path.getHead().getState(), "solution", true); if (cI != null) { this.logger.debug("Characterizing new node."); return new DenseDoubleVector(this.characterizer.characterize(cI)); } else { this.logger.debug("CI from node for characterization is null."); return new DenseDoubleVector(this.characterizer.getLengthOfCharacterization(), 0); } } }
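The zero-vector fallback in characterize above is worth spelling out: whenever HASCOUtil cannot reconstruct a ComponentInstance from a partial search node, the queue still has to hand the ranker a vector of the characterizer's fixed length. A minimal sketch, assuming an illustrative length of 4:

import org.api4.java.common.math.IVector;

import ai.libs.jaicore.math.linearalgebra.DenseDoubleVector;

public class CharacterizationFallbackSketch {
	public static void main(final String[] args) {
		int length = 4; // stands in for characterizer.getLengthOfCharacterization()
		IVector fallback = new DenseDoubleVector(length, 0); // all-zero characterization
		// every dyad handed to the ranker keeps the same dimensionality, even for unresolvable nodes
		System.out.println(fallback.length());
	}
}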
0
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan/metamining
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan/metamining/dyadranking/WEKADyadRankedNodeQueueConfig.java
package ai.libs.mlplan.metamining.dyadranking; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Map; import org.openml.webapplication.fantail.dc.Characterizer; import org.openml.webapplication.fantail.dc.DatasetCharacterizerInitializationFailedException; import org.openml.webapplication.fantail.dc.LandmarkerCharacterizer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ai.libs.jaicore.components.api.IComponent; import ai.libs.jaicore.math.linearalgebra.DenseDoubleVector; import ai.libs.jaicore.ml.ranking.dyad.learner.search.ADyadRankedNodeQueueConfig; import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode; import ai.libs.jaicore.search.algorithms.standard.bestfirst.BestFirst; import ai.libs.mlplan.metamining.pipelinecharacterizing.ComponentInstanceVectorFeatureGenerator; import ai.libs.mlplan.metamining.pipelinecharacterizing.IPipelineCharacterizer; import weka.core.Instances; /** * A configuration class that contains configurable variables for using ML-Plan * with best-first search and a dyad-ranked OPEN list instead of random * completions. * * @author Helena Graf * */ public class WEKADyadRankedNodeQueueConfig extends ADyadRankedNodeQueueConfig<TFDNode> { private Logger logger = LoggerFactory.getLogger(WEKADyadRankedNodeQueueConfig.class); /** * the characterizer used to characterize new datasets, must produce dataset * meta data of the same format the dyad ranker is trained with */ private Characterizer datasetCharacterizer; /** * the characterizer used to characterize new pipelines; must produce pipeline * meta features of the same format the dyad ranker is trained with */ private IPipelineCharacterizer pipelineCharacterizer; /** * characterization of the dataset the WEKA classifiers are applied to */ private double[] contextCharacterization; /** * components used during the search; necessary so that the pipeline * characterizer can translate nodes to component instances */ private Collection<IComponent> components; /** * Create a new configuration for a WEKA dyad ranked node queue. * * @throws IOException * if the default ranker or scaler cannot be loaded * @throws ClassNotFoundException * if the default ranker or scaler cannot be instantiated * @throws DatasetCharacterizerInitializationFailedException * if the default dataset characterizer cannot be instantiated */ public WEKADyadRankedNodeQueueConfig() throws ClassNotFoundException, IOException, DatasetCharacterizerInitializationFailedException { super(); this.datasetCharacterizer = new LandmarkerCharacterizer(); } @SuppressWarnings({ "rawtypes", "unchecked" }) @Override public void configureBestFirst(final BestFirst bestFirst) { this.logger.trace("Configuring OPEN list of BF"); bestFirst.setOpen(new WEKADyadRankedNodeQueue(new DenseDoubleVector(this.contextCharacterization), this.components, this.ranker, this.scaler, this.pipelineCharacterizer)); } /** * Configure the data whose meta features form the context in which the dyad * ranker ranks the pipelines. 
 * * @param data * the data to use * @throws Exception */ public void setData(final Instances data) throws Exception { this.logger.trace("Setting data to instances of size {}", data.size()); this.contextCharacterization = this.datasetCharacterizer.characterizeAll(data).entrySet().stream() .mapToDouble(Map.Entry::getValue).toArray(); } /** * Configure the dyad ranked node queue to use the given components for the * pipeline characterizer to transform nodes to component instances. * * @param components * the components to use for the pipeline characterizer */ public void setComponents(final Collection<? extends IComponent> components) { this.components = new ArrayList<>(components); if (this.pipelineCharacterizer == null) { this.pipelineCharacterizer = new ComponentInstanceVectorFeatureGenerator(components); } } /** * Set the dataset characterizer to be used. It must produce dataset * meta data of the same format the dyad ranker is trained with. * * @param datasetCharacterizer * the dataset characterizer to use */ public void setDatasetCharacterizer(final Characterizer datasetCharacterizer) { this.datasetCharacterizer = datasetCharacterizer; } /** * Set the pipeline characterizer to be used. It must produce pipeline * meta features of the same format the dyad ranker is trained with. * * @param pipelineCharacterizer the pipeline characterizer to use */ public void setPipelineCharacterizer(final IPipelineCharacterizer pipelineCharacterizer) { this.pipelineCharacterizer = pipelineCharacterizer; } }
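A hypothetical wiring of this configuration, assuming the caller already has the ML-Plan search-space components, the training data, and the BestFirst instance, and assuming the sketch sits in the same package as the config class. Only the three calls on the config object come from the class above.

import java.util.Collection;

import ai.libs.jaicore.components.api.IComponent;
import ai.libs.jaicore.search.algorithms.standard.bestfirst.BestFirst;
import weka.core.Instances;

public class DyadRankedQueueWiringSketch {
	// illustrative wiring; callers would pass the ML-Plan search-space components,
	// the training data, and the BestFirst instance to be configured
	@SuppressWarnings("rawtypes")
	public static void wire(final Collection<IComponent> components, final Instances trainingInstances, final BestFirst bestFirst) throws Exception {
		WEKADyadRankedNodeQueueConfig config = new WEKADyadRankedNodeQueueConfig();
		config.setComponents(components);     // also creates a default pipeline characterizer
		config.setData(trainingInstances);    // characterizes the dataset as the ranking context
		config.configureBestFirst(bestFirst); // swaps in the dyad-ranked OPEN list
	}
}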
0
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan/metamining
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan/metamining/pipelinecharacterizing/AWEKAPerformanceDecisionTreeBasedFeatureGenerator.java
package ai.libs.mlplan.metamining.pipelinecharacterizing; import java.util.ArrayList; import java.util.Map; import org.api4.java.ai.ml.core.exception.TrainingException; import org.api4.java.algorithm.exceptions.AlgorithmException; import org.api4.java.common.math.IVector; import weka.core.Attribute; import weka.core.DenseInstance; import weka.core.Instances; /** * An {@link IPerformanceDecisionTreeBasedFeatureGenerator} that uses a WEKA * implementation of a decision tree. * * @author Helena Graf * */ public abstract class AWEKAPerformanceDecisionTreeBasedFeatureGenerator implements IPerformanceDecisionTreeBasedFeatureGenerator { @Override public void train(final Map<IVector, Double> intermediatePipelineRepresentationsWithPerformanceValues) throws TrainingException { // Step 1: Transform to Instances Object int numFeatures = intermediatePipelineRepresentationsWithPerformanceValues.keySet().iterator().next().length(); ArrayList<Attribute> attInfo = new ArrayList<>(); for (int i = 0; i < numFeatures; i++) { attInfo.add(new Attribute("Attribute-" + i)); } attInfo.add(new Attribute("Target")); Instances train = new Instances("train", attInfo, intermediatePipelineRepresentationsWithPerformanceValues.size()); train.setClassIndex(train.numAttributes() - 1); intermediatePipelineRepresentationsWithPerformanceValues.forEach((features, value) -> { double[] values = new double[features.length() + 1]; for (int i = 0; i < features.length(); i++) { values[i] = features.getValue(i); } values[values.length - 1] = value; train.add(new DenseInstance(1, values)); }); try { this.train(train); } catch (AlgorithmException e) { throw new TrainingException("Could not train the " + this.getClass().getName() + ".", e); } } /** * Constructs an internal decision tree based on the Instances object so that * the feature generator can be used in the future to predict features for some * new vector ({@link #predict(IVector)}). * * @param data * the training instances * @throws AlgorithmException * if the internal decision tree cannot be built */ public abstract void train(Instances data) throws AlgorithmException; }
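A toy end-to-end use of the Map-based train method, going through the RandomTreePerformanceBasedFeatureGenerator defined later in this package (the sketch assumes it sits in the same package). The two-feature vectors and scores are invented, and a real WEKA tree will want considerably more than three training examples; this is only meant to show the data flow.

import java.util.HashMap;
import java.util.Map;

import org.api4.java.common.math.IVector;

import ai.libs.jaicore.math.linearalgebra.DenseDoubleVector;

public class FeatureGeneratorTrainingSketch {

	private static IVector vector(final double x, final double y) {
		IVector v = new DenseDoubleVector(2, 0.0);
		v.setValue(0, x);
		v.setValue(1, y);
		return v;
	}

	public static void main(final String[] args) throws Exception {
		// toy training data: intermediate pipeline representations mapped to performance scores
		Map<IVector, Double> examples = new HashMap<>();
		examples.put(vector(0, 1), 0.25);
		examples.put(vector(1, 0), 0.75);
		examples.put(vector(1, 1), 0.5);

		RandomTreePerformanceBasedFeatureGenerator generator = new RandomTreePerformanceBasedFeatureGenerator();
		generator.train(examples); // converts the map to WEKA Instances and fits the tree
		System.out.println(generator.predict(vector(0.5, 0.5))); // path-based feature vector
	}
}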
0
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan/metamining
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan/metamining/pipelinecharacterizing/ComponentInstanceStringConverter.java
package ai.libs.mlplan.metamining.pipelinecharacterizing; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Properties; import java.util.regex.Pattern; import java.util.stream.Collectors; import org.apache.commons.math3.geometry.euclidean.oned.Interval; import org.apache.commons.math3.geometry.partitioning.Region.Location; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ai.libs.jaicore.components.api.IComponentInstance; import ai.libs.jaicore.components.api.INumericParameterRefinementConfigurationMap; import ai.libs.jaicore.components.api.INumericParameterRefinementConfiguration; import ai.libs.jaicore.components.api.IParameter; import ai.libs.jaicore.components.model.CompositionProblemUtil; import ai.libs.jaicore.components.model.NumericParameterDomain; import treeminer.util.TreeRepresentationUtils; public class ComponentInstanceStringConverter extends Thread { private static final String WEKA_LABEL_FILE = "weka-labels.properties"; private static final Logger log = LoggerFactory.getLogger(ComponentInstanceStringConverter.class); /** * The name of the top node for all pipelines */ private String pipelineTreeName = "0"; private IOntologyConnector ontologyConnector; private List<IComponentInstance> cIs; private Properties wekaLabels; private List<String> convertedPipelines; private INumericParameterRefinementConfigurationMap componentParameters; public ComponentInstanceStringConverter(final IOntologyConnector ontologyConnector, final List<? extends IComponentInstance> cIs, final INumericParameterRefinementConfigurationMap componentParameters) { this.ontologyConnector = ontologyConnector; this.cIs = new ArrayList<>(cIs); this.convertedPipelines = new ArrayList<>(cIs.size()); this.componentParameters = componentParameters; InputStream fis = this.getClass().getClassLoader().getResourceAsStream(WEKA_LABEL_FILE); this.wekaLabels = new Properties(); try { this.wekaLabels.load(fis); } catch (IOException e) { log.warn("Could not load weka labels."); throw new ComponentInstanceStringConverterIntializeException(e); } } @Override public void run() { for (IComponentInstance cI : this.cIs) { String pipeline = this.makeStringTreeRepresentation(cI); this.convertedPipelines.add(pipeline); } } /** * Converts the given MLPipeline to a String representation of its components * using the ontology. 
 * * @param pipeline * The pipeline to convert * @return The string representation of the tree deduced from the pipeline * */ public String makeStringTreeRepresentation(final IComponentInstance pipeline) { List<String> pipelineBranches = new ArrayList<>(); IComponentInstance classifierCI; if (pipeline == null) { log.warn("Tried to characterize a null pipeline"); return null; } // Component is a pipeline if (pipeline.getComponent().getName().equals("pipeline")) { IComponentInstance preprocessorCI = pipeline.getSatisfactionOfRequiredInterface("preprocessor").iterator().next(); if (preprocessorCI != null) { // Characterize searcher this.addCharacterizationOfPipelineElement(pipelineBranches, preprocessorCI.getSatisfactionOfRequiredInterface("search").iterator().next()); // Characterize evaluator this.addCharacterizationOfPipelineElement(pipelineBranches, preprocessorCI.getSatisfactionOfRequiredInterface("eval").iterator().next()); } classifierCI = pipeline.getSatisfactionOfRequiredInterface("classifier").iterator().next(); } else { // Component is just a classifier classifierCI = pipeline; } // Characterize classifier this.addCharacterizationOfPipelineElement(pipelineBranches, classifierCI); // Put tree together String toReturn = TreeRepresentationUtils.addChildrenToNode(this.pipelineTreeName, pipelineBranches); // if we have a properties file which maps our weka labels to integers, use it if (this.wekaLabels != null) { Pattern p = Pattern.compile(" "); return p.splitAsStream(toReturn).filter(s -> !"".equals(s)).map(s -> this.wekaLabels.getProperty(s, s)).collect(Collectors.joining(" ")); } else { log.error("Did not find label property mapper."); throw new IllegalStateException(); } } /** * Gets the ontology characterization and selected parameters of the given * ComponentInstance and adds its characterization (the branch of a tree that is * the current pipeline) to the pipeline tree by adding its branch * representation as the last element of the list of branches. * * @param pipelineBranches * The current branches of the pipeline. * @param componentInstance * The pipeline element to be characterized */ protected void addCharacterizationOfPipelineElement(final List<String> pipelineBranches, final IComponentInstance componentInstance) { if (componentInstance != null) { String wekaName = componentInstance.getComponent().getName(); // Get generalization List<String> branchComponents = this.ontologyConnector.getAncestorsOfAlgorithm(wekaName); // Get parameters branchComponents.set(branchComponents.size() - 1, TreeRepresentationUtils.addChildrenToNode(branchComponents.get(branchComponents.size() - 1), this.getParametersForComponentInstance(componentInstance))); // Serialize String branch = TreeRepresentationUtils.makeRepresentationForBranch(branchComponents); pipelineBranches.add(branch); } } /** * Get String representations of the parameters of the given ComponentInstance * (representing a pipeline element). Numerical parameters are refined. 
 * * @param componentInstance * The ComponentInstance for which to get the parameters * @return A list of parameter descriptions represented as Strings */ protected List<String> getParametersForComponentInstance(final IComponentInstance componentInstance) { List<String> parameters = new ArrayList<>(); // Get parameters of the base classifier if this is a meta classifier if (componentInstance.getSatisfactionOfRequiredInterfaces() != null && !componentInstance.getSatisfactionOfRequiredInterfaces().isEmpty()) { componentInstance.getSatisfactionOfRequiredInterfaces().forEach((requiredInterface, component) -> { // so far, only the "K" interface exists, and it has no parameters, so we can directly retrieve its satisfying component List<String> kernelFunctionCharacterisation = new ArrayList<>(); kernelFunctionCharacterisation.add(requiredInterface); kernelFunctionCharacterisation.addAll(this.ontologyConnector.getAncestorsOfAlgorithm(component.iterator().next().getComponent().getName())); parameters.add(TreeRepresentationUtils.addChildrenToNode(requiredInterface, Arrays.asList(TreeRepresentationUtils.makeRepresentationForBranch(kernelFunctionCharacterisation)))); }); } // Get other parameters for (IParameter parameter : componentInstance.getComponent().getParameters()) { // Check if the parameter even has a value! String parameterName = parameter.getName(); if (!componentInstance.getParameterValues().containsKey(parameterName)) { continue; } List<String> parameterRefinement = new ArrayList<>(); parameterRefinement.add(parameterName); // Numeric parameter - needs to be refined if (parameter.isNumeric()) { this.resolveNumericParameter(componentInstance, parameter, parameterName, parameterRefinement); } else if (parameter.isCategorical()) { // Categorical parameter parameterRefinement.add(componentInstance.getParameterValues().get(parameterName)); } parameters.add(TreeRepresentationUtils.makeRepresentationForBranch(parameterRefinement)); } return parameters; } private void resolveNumericParameter(final IComponentInstance componentInstance, final IParameter parameter, final String parameterName, final List<String> parameterRefinement) { INumericParameterRefinementConfiguration parameterRefinementConfiguration = this.componentParameters.getRefinement(componentInstance.getComponent(), parameter); NumericParameterDomain parameterDomain = ((NumericParameterDomain) parameter.getDefaultDomain()); Interval currentInterval = null; Interval nextInterval = new Interval(parameterDomain.getMin(), parameterDomain.getMax()); double parameterValue = Double.parseDouble(componentInstance.getParameterValues().get(parameterName)); double precision = parameterValue == 0 ? 0 : Math.ulp(parameterValue); while (true) { currentInterval = nextInterval; parameterRefinement.add(this.serializeInterval(currentInterval)); List<Interval> refinement = CompositionProblemUtil.getNumericParameterRefinement(nextInterval, parameterValue, parameterDomain.isInteger(), parameterRefinementConfiguration); if (refinement.isEmpty()) { break; } for (Interval interval : refinement) { if (interval.checkPoint(parameterValue, precision) == Location.INSIDE || interval.checkPoint(parameterValue, precision) == Location.BOUNDARY) { nextInterval = interval; break; } } } parameterRefinement.add(String.valueOf(parameterValue)); } /** * Helper method for serializing an interval so that it can be used in String * representations of parameters of pipeline elements. 
* * @param interval * The interval to be serialized * @return The String representation of the interval */ protected String serializeInterval(final Interval interval) { StringBuilder builder = new StringBuilder(); builder.append("["); builder.append(interval.getInf()); builder.append(","); builder.append(interval.getSup()); builder.append("]"); return builder.toString(); } public List<String> getConvertedPipelines() { return this.convertedPipelines; } }
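The two TreeRepresentationUtils calls used throughout this class can be exercised in isolation. The sketch below assembles one invented classifier branch under the artificial root "0"; the labels are made up, and the exact encoding of the resulting string is defined by the treeminer library, not by this code.

import java.util.Arrays;

import treeminer.util.TreeRepresentationUtils;

public class TreeStringSketch {
	public static void main(final String[] args) {
		// a branch: an algorithm preceded by its (invented) ontology ancestors
		String branch = TreeRepresentationUtils.makeRepresentationForBranch(Arrays.asList("Classifier", "BayesClassifier", "NaiveBayes"));
		// hang the branch under the artificial root node "0", as makeStringTreeRepresentation does
		String pipelineTree = TreeRepresentationUtils.addChildrenToNode("0", Arrays.asList(branch));
		System.out.println(pipelineTree);
	}
}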
0
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan/metamining
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan/metamining/pipelinecharacterizing/ComponentInstanceVectorFeatureGenerator.java
package ai.libs.mlplan.metamining.pipelinecharacterizing; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import org.api4.java.common.math.IVector; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ai.libs.jaicore.components.api.IComponent; import ai.libs.jaicore.components.api.IComponentInstance; import ai.libs.jaicore.components.api.IParameter; import ai.libs.jaicore.components.model.CategoricalParameterDomain; import ai.libs.jaicore.math.linearalgebra.DenseDoubleVector; /** * Characterizes a pipeline by the components that occur in it and the parameters that are set for it. * * @author Mirko Jürgens * */ public class ComponentInstanceVectorFeatureGenerator implements IPipelineCharacterizer { private static final Logger logger = LoggerFactory.getLogger(ComponentInstanceVectorFeatureGenerator.class); /** * Maps the name of a component to a map that maps the name of the hyperparameter to its index in the dyad vector. */ private Map<String, Map<String, Integer>> componentNameToParameterDyadIndex = new HashMap<>(); /** * Maps the name of a component to its index in the dyad vector. */ private Map<String, Integer> componentNameToDyadIndex = new HashMap<>(); /** * Number of found patterns. */ private int patternCount; /** * Construct a ComponentInstanceVectorFeatureGenerator that is able to characterize pipelines consisting of the given components and parameters. * * @param collection * the components to use */ public ComponentInstanceVectorFeatureGenerator(final Collection<? extends IComponent> collection) { int counter = 0; logger.debug("Got {} components as input.", collection.size()); for (IComponent component : collection) { logger.debug("Inserting {} at position {}", component.getName(), counter); this.componentNameToDyadIndex.put(component.getName(), counter++); Map<String, Integer> parameterIndices = new HashMap<>(); logger.debug("{} has {} parameters.", component.getName(), component.getParameters().size()); for (IParameter param : component.getParameters()) { if (param.isNumeric()) { parameterIndices.put(param.getName(), counter++); } else if (param.isCategorical()) { parameterIndices.put(param.getName(), counter); CategoricalParameterDomain domain = (CategoricalParameterDomain) param.getDefaultDomain(); counter += domain.getValues().length; } } this.componentNameToParameterDyadIndex.put(component.getName(), parameterIndices); } this.patternCount = counter; } /** * Recursively resolves the components. 
 * * @param cI * the component instance to resolve * @param patterns * the patterns found so far * @return the characterization */ public double[] characterize(final IComponentInstance cI, final IVector patterns) { // first: get the encapsulated component IComponent c = cI.getComponent(); String componentName = c.getName(); // set the used algorithm to '1' int index = this.componentNameToDyadIndex.get(componentName); patterns.setValue(index, 1.0d); // now resolve the parameters Map<String, Integer> parameterIndices = this.componentNameToParameterDyadIndex.get(componentName); // assumption: the value is always set in the parameters vector for (IParameter param : c.getParameters()) { String parameterName = param.getName(); int parameterIndex = parameterIndices.get(parameterName); if (param.isNumeric()) { this.handleNumericalParameter(cI, patterns, param, parameterIndex); } else if (param.isCategorical()) { this.handleCategoricalParameter(cI, patterns, param, parameterIndex); } } // recursively resolve the patterns for the requiredInterfaces for (Collection<IComponentInstance> requiredInterface : cI.getSatisfactionOfRequiredInterfaces().values()) { this.characterize(requiredInterface.iterator().next(), patterns); } return patterns.asArray(); } private void handleNumericalParameter(final IComponentInstance cI, final IVector patterns, final IParameter param, final int parameterIndex) { if (cI.getParameterValue(param) != null) { double value = Double.parseDouble(cI.getParameterValue(param)); patterns.setValue(parameterIndex, value); } else { double value = (double) param.getDefaultValue(); patterns.setValue(parameterIndex, value); } } private void handleCategoricalParameter(final IComponentInstance cI, final IVector patterns, final IParameter param, final int parameterIndex) { // the parameter values are one-hot-encoded: parameterIndex is the // one-hot index for the first value of the categorical domain, parameterIndex+1 is the // one-hot index for the second value, etc. String parameterValue = cI.getParameterValue(param); if (parameterValue == null) { if (param.getDefaultValue() instanceof String) { parameterValue = (String) param.getDefaultValue(); } else { parameterValue = String.valueOf(param.getDefaultValue()); } } CategoricalParameterDomain domain = (CategoricalParameterDomain) param.getDefaultDomain(); for (int i = 0; i < domain.getValues().length; i++) { if (domain.getValues()[i].equals(parameterValue)) { patterns.setValue(parameterIndex + i, 1); } else { patterns.setValue(parameterIndex + i, 0); } } } @Override public void build(final List<? extends IComponentInstance> pipelines) throws InterruptedException { throw new UnsupportedOperationException("This characterizer is not trained!"); } @Override public double[] characterize(final IComponentInstance pipeline) { return this.characterize(pipeline, new DenseDoubleVector(this.patternCount, 0.0d)); } @Override public double[][] getCharacterizationsOfTrainingExamples() { throw new UnsupportedOperationException("This characterizer is not trained!"); } @Override public int getLengthOfCharacterization() { return this.patternCount; } }
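The index layout that the constructor above builds can be traced with plain arithmetic. The sketch below replays it for one invented component with one numeric parameter and one categorical parameter with three values; no library types are needed.

public class DyadIndexLayoutSketch {
	public static void main(final String[] args) {
		int counter = 0;
		int componentIndex = counter++;      // "MyClassifier" occurs -> vector[0] = 1
		int numericParamIndex = counter++;   // e.g. "C" -> vector[1] = value of C
		int categoricalParamStart = counter; // e.g. "kernel" with 3 values
		counter += 3;                        // vector[2..4] form the one-hot block
		// choosing the second kernel value sets vector[categoricalParamStart + 1] = 1
		System.out.printf("component=%d numeric=%d categorical=[%d..%d] total=%d%n",
				componentIndex, numericParamIndex, categoricalParamStart, categoricalParamStart + 2, counter);
	}
}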
0
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan/metamining
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan/metamining/pipelinecharacterizing/IPerformanceDecisionTreeBasedFeatureGenerator.java
package ai.libs.mlplan.metamining.pipelinecharacterizing; import java.util.Map; import org.api4.java.ai.ml.core.exception.TrainingException; import org.api4.java.common.math.IVector; /** * A feature generator that is based on a decision tree. Generates new features * for given feature vectors, based on paths in a decision tree constructed from * the given examples of feature vectors with associated performance values. * * @author Helena Graf * */ public interface IPerformanceDecisionTreeBasedFeatureGenerator { /** * Constructs an internal decision tree so that the feature generator can be * used in the future to predict features for some new vector * ({@link #predict(IVector)}). * * @param intermediatePipelineRepresentationsWithPerformanceValues * maps feature vectors to performance values. Should only contain * numerical features. * @throws TrainingException * if something goes wrong while constructing the tree */ void train(Map<IVector, Double> intermediatePipelineRepresentationsWithPerformanceValues) throws TrainingException; /** * Predicts a feature vector based on a path in the constructed decision tree: * Each node in the tree is given a unique index. Then, for the given vector, * the tree is traversed and a feature vector is generated based on which nodes * are encountered during the traversal. * * @param intermediatePipelineRepresentation * the feature vector for which to generate a new representation * @return the new representation of the given feature vector */ IVector predict(IVector intermediatePipelineRepresentation); }
0
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan/metamining
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan/metamining/pipelinecharacterizing/IPipelineCharacterizer.java
package ai.libs.mlplan.metamining.pipelinecharacterizing; import java.util.List; import ai.libs.jaicore.components.api.IComponentInstance; import ai.libs.jaicore.components.model.ComponentInstance; import ai.libs.jaicore.ml.weka.classification.pipeline.MLPipeline; /** * Finds patterns in given MLPipelines. A pipeline characterizer first has to be * built with {@link #build(List)}, where it identifies patterns in the given * data base of pipelines. Subsequently, it can be used to check for these * patterns in a new pipeline. * * @author Helena Graf, Mirko Jürgens * */ public interface IPipelineCharacterizer { /** * Finds frequent patterns in the given list of pipelines. * * @param pipelines * The pipelines to go through for patterns * @throws InterruptedException */ public void build(List<? extends IComponentInstance> pipelines) throws InterruptedException; /** * Checks which of the found patterns (found during the training phase in * {@link IPipelineCharacterizer#build(List)}) occur in this pipeline. * * If in the returned list l, l[j]=1, pattern j occurs in this pipeline. * Otherwise l[j]=0 and pattern j doesn't occur in this pipeline. * * @param pipeline * The pipeline for which pattern occurrence is checked * @return A list representing pattern occurrences in the pipeline */ public double[] characterize(IComponentInstance pipeline); /** * For each {@link MLPipeline} that was used in the training (given by its * ComponentInstance), return which found pattern (found during the training * phase in {@link IPipelineCharacterizer#build(List)}) occurs in which * pipeline. * * If in the returned matrix m, m[i][j]=1, pattern j occurs in training pipeline * i. Otherwise m[i][j]=0 and pattern j doesn't occur in training pipeline i. * * @return A matrix representing pattern occurrences in pipelines */ public double[][] getCharacterizationsOfTrainingExamples(); /** * Returns the amount of found pipeline patterns, which is the length of a * characterization. * * @return the length of any array produced by * {@link #characterize(ComponentInstance)}. */ public int getLengthOfCharacterization(); }
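To make the contract concrete, here is a deliberately trivial implementation of the interface; it is not part of the library, assumes it sits in the same package as the interface, and characterizes every pipeline by a single invented feature.

import java.util.List;

import ai.libs.jaicore.components.api.IComponentInstance;

public class TrivialPipelineCharacterizer implements IPipelineCharacterizer {

	@Override
	public void build(final List<? extends IComponentInstance> pipelines) throws InterruptedException {
		// nothing to learn for this toy characterizer
	}

	@Override
	public double[] characterize(final IComponentInstance pipeline) {
		// single feature: does the pipeline nest any further components?
		boolean hasNestedComponents = !pipeline.getSatisfactionOfRequiredInterfaces().isEmpty();
		return new double[] { hasNestedComponents ? 1 : 0 };
	}

	@Override
	public double[][] getCharacterizationsOfTrainingExamples() {
		return new double[0][]; // no training examples are kept
	}

	@Override
	public int getLengthOfCharacterization() {
		return 1;
	}
}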
0
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan/metamining
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan/metamining/pipelinecharacterizing/RandomTreePerformanceBasedFeatureGenerator.java
package ai.libs.mlplan.metamining.pipelinecharacterizing; import java.util.HashMap; import java.util.Map; import org.api4.java.algorithm.exceptions.AlgorithmException; import org.api4.java.common.math.IVector; import ai.libs.jaicore.math.linearalgebra.DenseDoubleVector; import ai.libs.jaicore.ml.weka.classification.singlelabel.timeseries.learner.trees.AccessibleRandomTree; import ai.libs.jaicore.ml.weka.classification.singlelabel.timeseries.learner.trees.AccessibleRandomTree.AccessibleTree; import weka.classifiers.trees.RandomTree; import weka.core.Instances; /** * A {@link AWEKAPerformanceDecisionTreeBasedFeatureGenerator} that uses a * {@link RandomTree}. * * @author Helena Graf * */ public class RandomTreePerformanceBasedFeatureGenerator extends AWEKAPerformanceDecisionTreeBasedFeatureGenerator { private AccessibleRandomTree randomTree = new AccessibleRandomTree(); private Map<AccessibleTree, Integer> nodesIndices = new HashMap<>(); private AccessibleTree tree; private boolean allowUnsetValues = false; private double incomingUnsetValueValue = Double.NaN; private double outgoingUnsetValueValue = 0; private double occurenceValue = 1; private double nonOccurenceValue = -1; @Override public void train(final Instances data) throws AlgorithmException { // Step 1: Train Random Tree try { this.randomTree.buildClassifier(data); } catch(Exception e) { throw new AlgorithmException("Random Tree could not be trained!", e); } // Step 2: Count the nodes in the tree (DF Traversal Index Mapping) this.addIndexToMap(0, this.randomTree.getMTree()); this.tree = this.randomTree.getMTree(); } private int addIndexToMap(int subTreeIndex, final AccessibleTree subTree) { this.nodesIndices.put(subTree, subTreeIndex); subTreeIndex++; int numberOfSuccessors = 0; if (subTree.getSuccessors() != null) { for (int i = 0; i < subTree.getSuccessors().length; i++) { subTreeIndex += numberOfSuccessors; numberOfSuccessors += this.addIndexToMap(subTreeIndex, subTree.getSuccessors()[i]) + 1; } } return numberOfSuccessors; } @Override public IVector predict(final IVector intermediatePipelineRepresentation) { IVector pipelineRepresentation = new DenseDoubleVector(this.nodesIndices.size(), this.nonOccurenceValue); // Query the RandomTree AccessibleTree subTree = this.tree; while (subTree != null) { if (subTree.getAttribute() == -1) { // We are at a leaf node - The current node occurs pipelineRepresentation.setValue(this.nodesIndices.get(subTree), this.occurenceValue); // We are at a leaf - stop subTree = null; } else if (this.allowUnsetValues && !this.isValueUnset(intermediatePipelineRepresentation.getValue(subTree.getAttribute())) || !this.allowUnsetValues) { // The current node occurs pipelineRepresentation.setValue(this.nodesIndices.get(subTree), this.occurenceValue); if (intermediatePipelineRepresentation.getValue(subTree.getAttribute()) < subTree.getSplitPoint()) { // we go to the left subTree = subTree.getSuccessors()[0]; } else { // we go to the right subTree = subTree.getSuccessors()[1]; } } else { // We do allow unset values and the value is unset - set the subtree to non // occurence and end the traversal this.setSubTreeToValue(subTree, this.outgoingUnsetValueValue, pipelineRepresentation); subTree = null; } } return pipelineRepresentation; } private boolean isValueUnset(final double value) { if (Double.isNaN(this.incomingUnsetValueValue)) { return Double.isNaN(value); } else { return value == this.incomingUnsetValueValue; } } private void setSubTreeToValue(final AccessibleTree subTree, final double value, final IVector 
featureRepresentation) { featureRepresentation.setValue(this.nodesIndices.get(subTree), value); if (subTree.getSuccessors() != null) { for (int i = 0; i < subTree.getSuccessors().length; i++) { this.setSubTreeToValue(subTree.getSuccessors()[i], value, featureRepresentation); } } } @Override public String toString() { StringBuilder builder = new StringBuilder(); try { builder.append(this.randomTree); } catch (Exception e) { builder.append("Can not print tree"); } builder.append(System.lineSeparator()); builder.append(this.nodesIndices); builder.append(System.lineSeparator()); return builder.toString(); } /** * Get the value that is assumed to mean a missing value for incoming feature * values. Only relevant if missing values for incoming feature vectors are * allowed. * * @return the value that is assumed to mean a missing value for incoming * feature values */ public double getIncomingUnsetValueValue() { return this.incomingUnsetValueValue; } /** * Allow incoming feature vectors to have missing values. * * @param unsetValueValue * the value that represents a value to be missing in incoming * feature vectors. */ public void setAllowNonOccurence(final double unsetValueValue) { this.allowUnsetValues = true; this.incomingUnsetValueValue = unsetValueValue; } /** * Disallow incoming feature vectors from having missing values. */ public void disallowNonOccurence() { this.allowUnsetValues = false; } /** * Get the value that this feature generator sets for areas of the trees that * are not encountered because an attribute that is used as a split in a node * that is encountered is not set in a given feature representation. * * @return the produced value for areas of the tree that are blocked by a * missing feature value */ public double getOutgoingUnsetValueValue() { return this.outgoingUnsetValueValue; } /** * Set the value that this feature generator sets for areas of the trees that * are not encountered because an attribute that is used as a split in a node * that is encountered is not set in a given feature representation. * * @param outgoingUnsetValueValue * the produced value for areas of the tree that are blocked by a * missing feature value */ public void setOutgoingUnsetValueValue(final double outgoingUnsetValueValue) { this.outgoingUnsetValueValue = outgoingUnsetValueValue; } /** * Get the value that this feature generator sets for nodes in the tree that are * encountered during the traversal based on a given feature vector. * * @return the value that this feature generator sets for nodes in the tree that * are encountered */ public double getOccurenceValue() { return this.occurenceValue; } /** * Set the value that this feature generator sets for nodes in the tree that are * encountered during the traversal based on a given feature vector. * * @param occurenceValue * the value that this feature generator sets for nodes in the tree * that are encountered */ public void setOccurenceValue(final double occurenceValue) { this.occurenceValue = occurenceValue; } /** * Get the value that this feature generator sets for nodes in the tree that are * not encountered during the traversal based on a given feature vector. * * @return the value that this feature generator sets for nodes in the tree that * are not encountered */ public double getNonOccurenceValue() { return this.nonOccurenceValue; } /** * Set the value that this feature generator sets for nodes in the tree that are * not encountered during the traversal based on a given feature vector. 
* * @param nonOccurenceValue * the value that this feature generator sets for nodes in the tree * that are not encountered */ public void setNonOccurenceValue(final double nonOccurenceValue) { this.nonOccurenceValue = nonOccurenceValue; } }
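The four sentinel values of this generator interact as follows; a short configuration sketch with arbitrary example choices, assuming it sits in the same package as the generator:

public class FeatureGeneratorConfigSketch {
	public static void main(final String[] args) {
		RandomTreePerformanceBasedFeatureGenerator generator = new RandomTreePerformanceBasedFeatureGenerator();
		generator.setAllowNonOccurence(Double.NaN); // NaN entries in incoming vectors count as "unset"
		generator.setOutgoingUnsetValueValue(0);    // nodes blocked by an unset split attribute get 0
		generator.setOccurenceValue(1);             // nodes visited on the traversal get 1
		generator.setNonOccurenceValue(-1);         // nodes never reached get -1
	}
}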
0
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan/metamining
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/ai/libs/mlplan/metamining/pipelinecharacterizing/WEKAPipelineCharacterizer.java
package ai.libs.mlplan.metamining.pipelinecharacterizing; import java.io.File; import java.io.IOException; import java.net.URISyntaxException; import java.nio.file.Paths; import java.util.ArrayList; import java.util.List; import java.util.Scanner; import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ai.libs.jaicore.components.api.IComponentInstance; import ai.libs.jaicore.components.api.INumericParameterRefinementConfigurationMap; import ai.libs.jaicore.components.model.ComponentInstance; import treeminer.FrequentSubtreeFinder; import treeminer.TreeMiner; import treeminer.util.TreeRepresentationUtils; /** * A characterizer for MLPipelines. It characterizes pipelines using an ontology and a tree mining algorithm. The ontology is used to get a characterization of a pipeline element; from the characterization of all pipeline elements and * their parameters, a tree is then built. The trees retrieved from a number of training examples for pipelines are then used to find frequent patterns in the pipelines. A new pipeline is then characterized by which of these patterns appear * in it. * * @author Helena Graf * */ public class WEKAPipelineCharacterizer implements IPipelineCharacterizer { private static final Logger logger = LoggerFactory.getLogger(WEKAPipelineCharacterizer.class); /** The default path for precomputed algorithm patterns. */ private static final String ALGORITHM_PATTERNS_SUPPORT_5_PATH = "draco/patterns_support_5.csv"; /** * Number of concurrent threads maximally used by the characterizer */ private int cpus = 1; /** * The ontology connector used to characterize a single pipeline element */ private IOntologyConnector ontologyConnector; /** * The algorithm used by the pipeline characterizer to find frequent subtrees in deduced tree representations of given pipelines */ private FrequentSubtreeFinder treeMiner; /** * The frequent patterns found in the tree representations of pipelines by the tree mining algorithm */ private List<String> foundPipelinePatterns; /** * The minimum support required for a pattern to be considered frequent by the tree miner */ private int patternMinSupport = 5; private INumericParameterRefinementConfigurationMap componentParameters; /** * Creates a new pipeline characterizer that uses the given descriptions of parameters to characterize MLPipelines. * * @param componentParameters * The description of parameters in the current configuration together with their refinements. */ public WEKAPipelineCharacterizer(final INumericParameterRefinementConfigurationMap componentParameters) { TreeMiner miner = new TreeMiner(); miner.setCountMultipleOccurrences(false); miner.setOnlySearchForPatternsThatStartWithTheRoot(true); this.treeMiner = miner; this.componentParameters = componentParameters; try { this.ontologyConnector = new WEKAOntologyConnector(); } catch (OWLOntologyCreationException e) { logger.error("Cannot connect to Ontology!"); throw new OntologyNotFoundException(e); } } /** * Build this pipeline characterizer from a file of patterns. The patterns need to be UTF-8 encoded strings, and each line specifies exactly one pattern. 
* * @param file * the file to read from */ public void buildFromFile(final File file) { List<String> foundPatterns = new ArrayList<>(); try (Scanner scanner = new Scanner(file)) { while (scanner.hasNextLine()) { String pattern = scanner.nextLine(); foundPatterns.add(pattern); } } catch (IOException e) { logger.error("Couldn't initialize pipeline characterizer", e); } this.foundPipelinePatterns = foundPatterns; } /** * Builds the pipeline characterizer with a default list of patterns, which was generated by a random search over the algorithm space of weka. */ public void buildFromFile() { try { this.buildFromFile(Paths.get(this.getClass().getClassLoader().getResource(ALGORITHM_PATTERNS_SUPPORT_5_PATH).toURI()).toFile()); } catch (URISyntaxException e) { logger.error("Couldn't find default algorithm patterns!", e); } } @Override public void build(final List<? extends IComponentInstance> pipelines) throws InterruptedException { // Convert the pipelines to String representations logger.info("Converting training examples to trees. With support {}", this.patternMinSupport); int chunkSize = Math.floorDiv(pipelines.size(), this.cpus); int lastchunkSize = pipelines.size() - (chunkSize * (this.cpus - 1)); ComponentInstanceStringConverter[] threads = new ComponentInstanceStringConverter[this.cpus]; for (int i = 0; i < threads.length; i++) { threads[i] = new ComponentInstanceStringConverter(this.ontologyConnector, pipelines.subList(i * chunkSize, i == threads.length - 1 ? (i * chunkSize) + lastchunkSize : (i + 1) * chunkSize), this.componentParameters); threads[i].start(); } List<String> pipelineRepresentations = new ArrayList<>(pipelines.size()); for (int i = 0; i < threads.length; i++) { threads[i].join(); pipelineRepresentations.addAll(threads[i].getConvertedPipelines()); } // Use the tree miner to find patterns logger.info("Finding frequent subtrees"); this.foundPipelinePatterns = this.treeMiner.findFrequentSubtrees(pipelineRepresentations, this.patternMinSupport); } @Override public double[] characterize(final IComponentInstance pipeline) { // Make tree representation from this pipeline String treeRepresentation = new ComponentInstanceStringConverter(this.ontologyConnector, new ArrayList<>(), this.componentParameters).makeStringTreeRepresentation(pipeline); // Ask the treeMiner which of the patterns are included in this pipeline double[] pipelineCharacterization = new double[this.foundPipelinePatterns.size()]; for (int i = 0; i < this.foundPipelinePatterns.size(); i++) { if (TreeRepresentationUtils.containsSubtree(treeRepresentation, this.foundPipelinePatterns.get(i))) { pipelineCharacterization[i] = 1; } else { pipelineCharacterization[i] = 0; } } return pipelineCharacterization; } @Override public double[][] getCharacterizationsOfTrainingExamples() { return this.treeMiner.getCharacterizationsOfTrainingExamples(); } /** * * Returns the amount of found pipeline patterns, which is the length of a characterization. * * @return the length of any array produced by {@link #characterize(ComponentInstance)}. */ @Override public int getLengthOfCharacterization() { return this.foundPipelinePatterns.size(); } /** * Get the used ontology connector. * * @return The used ontology connector */ public IOntologyConnector getOntologyConnector() { return this.ontologyConnector; } /** * Set the ontology connector to be used. 
* * @param ontologyConnector * the ontologyConnector to be used */ public void setOntologyConnector(final IOntologyConnector ontologyConnector) { this.ontologyConnector = ontologyConnector; } /** * Get the minimum support required for a pattern to be considered frequent for the tree mining algorithm. * * @return The minimum support a tree pattern must have to be considered frequent */ public int getMinSupport() { return this.patternMinSupport; } /** * Set the minimum support required for a pattern to be considered frequent for the tree mining algorithm. * * @param minSupport * The minimum support a tree pattern must have to be considered frequent */ public void setMinSupport(final int minSupport) { this.patternMinSupport = minSupport; } /** * Inform the Characterizer about resource usage. * * @param cpus * Maximum number of threads that will be used by the characterizer */ public void setCPUs(final int cpus) { this.cpus = cpus; } /** * Get the patterns found among the given training examples. * * @return A list of patterns */ public List<String> getFoundPipelinePatterns() { return this.foundPipelinePatterns; } }
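A hypothetical end-to-end use of the characterizer above: load the shipped default patterns and characterize one pipeline. The sketch assumes it sits in the same package as the characterizer; the caller must supply the parameter-refinement configuration and the pipeline, and the CPU count is an arbitrary example.

import ai.libs.jaicore.components.api.IComponentInstance;
import ai.libs.jaicore.components.api.INumericParameterRefinementConfigurationMap;

public class PipelineCharacterizerUsageSketch {
	public static double[] characterizeWithDefaults(final INumericParameterRefinementConfigurationMap paramConfig, final IComponentInstance pipeline) {
		WEKAPipelineCharacterizer characterizer = new WEKAPipelineCharacterizer(paramConfig);
		characterizer.setCPUs(4);             // bound the converter threads used by build(...)
		characterizer.buildFromFile();        // loads the shipped draco/patterns_support_5.csv
		return characterizer.characterize(pipeline); // 0/1 vector of pattern occurrences
	}
}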
0
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/org/openml/webapplication/fantail
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/org/openml/webapplication/fantail/dc/DatasetCharacterizerInitializationFailedException.java
package org.openml.webapplication.fantail.dc; /** * An exception that signifies something went wrong during the initialization of * a dataset characterizer * * @author Helena Graf * */ public class DatasetCharacterizerInitializationFailedException extends Exception { /** * version number */ private static final long serialVersionUID = -7200872055151544998L; /** * Create an exception with a default message. */ public DatasetCharacterizerInitializationFailedException() { super(); } /** * Create an exception with the given message. * * @param message * the used message */ public DatasetCharacterizerInitializationFailedException(String message) { super(message); } /** * Create an exception with the given cause. * * @param cause * the cause of the exception */ public DatasetCharacterizerInitializationFailedException(Throwable cause) { super(cause); } /** * Create an exception with the given cause and additional message * * @param message * the message * @param cause * the cause of the exception */ public DatasetCharacterizerInitializationFailedException(String message, Throwable cause) { super(message, cause); } }
0
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/org/openml/webapplication/fantail
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/org/openml/webapplication/fantail/dc/GlobalCharacterizer.java
package org.openml.webapplication.fantail.dc; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.TreeMap; import org.apache.commons.lang3.time.StopWatch; import org.openml.webapplication.fantail.dc.landmarking.GenericLandmarker; import org.openml.webapplication.fantail.dc.statistical.Cardinality; import org.openml.webapplication.fantail.dc.statistical.NominalAttDistinctValues; import org.openml.webapplication.fantail.dc.statistical.SimpleMetaFeatures; import org.openml.webapplication.fantail.dc.statistical.Statistical; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import weka.core.Instances; import weka.core.Utils; /** * Characterizer that applies a number of Characterizers to a data set. Uses * probing. Adapted from {@link GlobalMetafeatures}. * * @author Helena Graf * */ public class GlobalCharacterizer extends Characterizer { private final Logger logger = LoggerFactory.getLogger(GlobalCharacterizer.class); // preprocessor prefixes protected static final String PREPROCESSING_PREFIX = "-E \"weka.attributeSelection.CfsSubsetEval -P 1 -E 1\" -S \"weka.attributeSelection.BestFirst -D 1 -N 5\" -W "; protected static final String CP_IBK = "weka.classifiers.lazy.IBk"; protected static final String CP_NB = "weka.classifiers.bayes.NaiveBayes"; protected static final String CP_ASC = "weka.classifiers.meta.AttributeSelectedClassifier"; protected static final String CP_DS = "weka.classifiers.trees.DecisionStump"; /** * The names of all the meta features that are computed by this characterizer */ protected String[] ids; /** * The list of characterizers used in the computation of meta features */ protected ArrayList<Characterizer> characterizers; /** * The names of the characterizers used */ protected Map<Characterizer, String> characterizerNames; /** * The time it took to compute the meta features for each characterizer by name */ protected Map<String, Double> computationTimes = new HashMap<>(); /** * Initializes a new characterizer. Calls {@link #initializeCharacterizers()}, * {@link #initializeCharacterizerNames()} and * {@link #initializeMetaFeatureIds()} in order. * * @throws DatasetCharacterizerInitializationFailedException * if the characterizer cannot be initialized properly */ public GlobalCharacterizer() throws DatasetCharacterizerInitializationFailedException { this.logger.trace("Initialize"); try { this.initializeCharacterizers(); } catch (Exception e) { throw new DatasetCharacterizerInitializationFailedException(e); } this.initializeCharacterizerNames(); this.initializeMetaFeatureIds(); } @Override public Map<String, Double> characterize(final Instances instances) { if (this.logger.isTraceEnabled()) { this.logger.trace("Characterize dataset \"{}\" ...", instances.relationName()); } TreeMap<String, Double> metaFeatures = new TreeMap<>(); StopWatch watch = new StopWatch(); for (Characterizer characterizer : this.characterizers) { try { watch.reset(); watch.start(); metaFeatures.putAll(characterizer.characterize(instances)); watch.stop(); this.computationTimes.put(characterizer.toString(), (double) watch.getTime()); } catch (Exception e) { for (String metaFeature : characterizer.getIDs()) { metaFeatures.put(metaFeature, Double.NaN); } this.computationTimes.put(characterizer.toString(), Double.NaN); } } this.logger.trace("Done characterizing dataset. Feature length: {}", metaFeatures.size()); return metaFeatures; } @Override public String toString() { StringBuilder builder = new StringBuilder(); // Build String representation on the basis of the used characterizers for (Characterizer characterizer : this.characterizers) { builder.append(characterizer.toString()); builder.append(System.lineSeparator()); String[] baseCharacterizerIds = characterizer.getIDs(); for (String id : baseCharacterizerIds) { builder.append(id + ","); } builder.append(System.lineSeparator()); } return builder.toString(); } /** * Gets the list of characterizers used in the computation of meta features. * * @return The characterizers */ public List<Characterizer> getCharacterizers() { return this.characterizers; } /** * Gets the time in milliseconds it took to compute each group of meta features * (Computed by a Characterizer). The computation times for the last time that * {@link #characterize(Instances)} was called are returned. The time is NaN if * the meta feature could not be computed. * * @return The meta feature computation times */ public Map<String, Double> getMetaFeatureComputationTimes() { return this.computationTimes; } /** * Gets the names of the used Characterizers. * * @return The names of the characterizers */ public List<String> getCharacterizerNames() { List<String> names = new ArrayList<>(); this.characterizerNames.values().forEach(names::add); return names; } /** * Gets names for the used Characterizers. * * @return The used Characterizers mapped to their names */ public Map<Characterizer, String> getCharacterizerNamesMappings() { return this.characterizerNames; } /** * Gets the mapping of a Characterizer to the meta features it computes. * * @return The mapping of Characterizer names to their meta features */ public Map<String, List<String>> getCharacterizerGroups() { Map<String, List<String>> results = new HashMap<>(); this.characterizerNames.forEach((characterizer, name) -> results.put(name, Arrays.asList(characterizer.getIDs()))); return results; } @Override public String[] getIDs() { return this.ids; } /** * Adds the required characterizers to {@link #characterizers}. * * @throws Exception */ protected void initializeCharacterizers() throws Exception { this.characterizers = new ArrayList<>(); this.addNoProbingCharacterizers(this.characterizers); this.addLandmarkerCharacterizers(this.characterizers); } protected void addNoProbingCharacterizers(final ArrayList<Characterizer> characterizerList) { characterizerList.addAll(Arrays.asList(new SimpleMetaFeatures(), new Statistical(), new NominalAttDistinctValues(), new Cardinality())); } protected void addLandmarkerCharacterizers(final ArrayList<Characterizer> characterizerList) throws Exception { characterizerList.addAll(Arrays.asList(new GenericLandmarker("kNN1N", CP_IBK, 2, null), new GenericLandmarker("NaiveBayes", CP_NB, 2, null), new GenericLandmarker("DecisionStump", CP_DS, 2, null), new GenericLandmarker("CfsSubsetEval_kNN1N", CP_ASC, 2, Utils.splitOptions(PREPROCESSING_PREFIX + CP_IBK)), new GenericLandmarker("CfsSubsetEval_NaiveBayes", CP_ASC, 2, Utils.splitOptions(PREPROCESSING_PREFIX + CP_NB)), new GenericLandmarker("CfsSubsetEval_DecisionStump", CP_ASC, 2, Utils.splitOptions(PREPROCESSING_PREFIX + CP_DS)))); StringBuilder zeroes = new StringBuilder(); zeroes.append("0"); for (int i = 1; i <= 3; ++i) { zeroes.append("0"); String[] j48Option = { "-C", "." + zeroes.toString() + "1" }; characterizerList.add(new GenericLandmarker("J48." 
+ zeroes.toString() + "1.", "weka.classifiers.trees.J48", 2, j48Option)); String[] repOption = { "-L", "" + i }; characterizerList.add(new GenericLandmarker("REPTreeDepth" + i, "weka.classifiers.trees.REPTree", 2, repOption)); String[] randomtreeOption = { "-depth", "" + i }; characterizerList.add(new GenericLandmarker("RandomTreeDepth" + i, "weka.classifiers.trees.RandomTree", 2, randomtreeOption)); } } /** * Initializes {@link #characterizerNames}. */ protected void initializeCharacterizerNames() { this.characterizerNames = new HashMap<>(); this.characterizers.forEach(characterizer -> { if (characterizer.getClass().equals(GenericLandmarker.class)) { String aUCName = characterizer.getIDs()[0]; String name = aUCName.substring(0, aUCName.length() - 3); this.characterizerNames.put(characterizer, name); } else { this.characterizerNames.put(characterizer, characterizer.getClass().getSimpleName()); } }); } /** * Initializes {@link #ids}. */ protected void initializeMetaFeatureIds() { List<String> metaFeatures = new ArrayList<>(); for (Characterizer characterizer : this.characterizers) { for (String metaFeature : characterizer.getIDs()) { metaFeatures.add(metaFeature); } } this.ids = metaFeatures.toArray(new String[metaFeatures.size()]); } }
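A minimal usage sketch for the characterizer above; the ARFF path is a placeholder, the class-index convention (last attribute) is an assumption about the input data, and the sketch assumes it sits in the same package as GlobalCharacterizer. Meta features whose computation fails come back as NaN rather than aborting the run.

import java.util.Map;

import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class GlobalCharacterizerUsageSketch {
	public static void main(final String[] args) throws Exception {
		Instances data = DataSource.read("dataset.arff"); // hypothetical path
		data.setClassIndex(data.numAttributes() - 1);
		GlobalCharacterizer characterizer = new GlobalCharacterizer();
		Map<String, Double> metaFeatures = characterizer.characterize(data);
		System.out.println(metaFeatures.size() + " meta features computed");
		System.out.println(characterizer.getMetaFeatureComputationTimes()); // per-characterizer timings in ms
	}
}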
0
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/org/openml/webapplication/fantail
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/org/openml/webapplication/fantail/dc/LandmarkerCharacterizer.java
package org.openml.webapplication.fantail.dc; import java.util.ArrayList; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A Characterizer that applies only the landmarking (probing) characterizers * of the {@link GlobalCharacterizer} to a data set. * * @author Helena Graf, Mirko Jürgens * */ public class LandmarkerCharacterizer extends GlobalCharacterizer { private Logger logger = LoggerFactory.getLogger(LandmarkerCharacterizer.class); /** * Constructs a new LandmarkerCharacterizer. Construction is the same as for the * {@link GlobalCharacterizer}, except that only the landmarking * Characterizers are initialized. * * @throws DatasetCharacterizerInitializationFailedException * if the characterizer cannot be initialized properly */ public LandmarkerCharacterizer() throws DatasetCharacterizerInitializationFailedException { super(); this.logger.trace("Initialize"); } @Override protected void initializeCharacterizers() throws Exception { this.characterizers = new ArrayList<>(); this.addLandmarkerCharacterizers(this.characterizers); } }
0
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/org/openml/webapplication/fantail
java-sources/ai/libs/mlplan-ext-metalearning/0.2.7/org/openml/webapplication/fantail/dc/NoProbingCharacterizer.java
package org.openml.webapplication.fantail.dc; import java.util.ArrayList; import java.util.Arrays; import org.openml.webapplication.fantail.dc.statistical.Cardinality; import org.openml.webapplication.fantail.dc.statistical.NominalAttDistinctValues; import org.openml.webapplication.fantail.dc.statistical.SimpleMetaFeatures; import org.openml.webapplication.fantail.dc.statistical.Statistical; /** * A Characterizer that applies several characterizers to a data set, but does * not use any probing. * * @author Helena Graf * */ public class NoProbingCharacterizer extends GlobalCharacterizer { /** * Constructs a new NoProbingCharacterizer. Construction is the same as for the * {@link GlobalCharacterizer}, except that only Characterizers that do not use probing * are initialized. * * @throws DatasetCharacterizerInitializationFailedException if the characterizer cannot be initialized properly */ public NoProbingCharacterizer() throws DatasetCharacterizerInitializationFailedException { super(); } @Override protected void initializeCharacterizers() { Characterizer[] characterizerArray = { new SimpleMetaFeatures(), new Statistical(), new NominalAttDistinctValues(), new Cardinality() }; this.characterizers = new ArrayList<>(Arrays.asList(characterizerArray)); } }