index
int64
repo_id
string
file_path
string
content
string
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/evaluation
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/evaluation/splitsetgenerator/MonteCarloCrossValidationSplitSetGenerator.java
package ai.libs.jaicore.ml.core.evaluation.splitsetgenerator;

import java.util.ArrayList;
import java.util.List;
import java.util.Random;

import org.api4.java.ai.ml.core.dataset.splitter.IRandomDatasetSplitter;
import org.api4.java.ai.ml.core.dataset.splitter.SplitFailedException;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;
import org.api4.java.ai.ml.core.evaluation.execution.IDatasetSplitSet;
import org.api4.java.ai.ml.core.evaluation.execution.IDatasetSplitSetGenerator;
import org.api4.java.common.control.ILoggingCustomizable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ai.libs.jaicore.ml.core.dataset.splitter.DatasetSplitSet;

/**
 * A DatasetSplitSetGenerator that creates k independent splits of the given dataset.
 * The type of split can be configured using the IDatasetSplitter.
 * The parameter k is configured over the variable "repeats".
 *
 * @author fmohr
 *
 */
public class MonteCarloCrossValidationSplitSetGenerator<D extends ILabeledDataset<?>> implements IDatasetSplitSetGenerator<D>, ILoggingCustomizable {

	private Logger logger = LoggerFactory.getLogger(MonteCarloCrossValidationSplitSetGenerator.class);
	private final IRandomDatasetSplitter<D> datasetSplitter;
	private final int repeats;
	private final long seed;
	private int runningSeed; // offset added to the base seed; incremented once per drawn split so that subsequent calls to nextSplitSet yield new splits

	/**
	 * @param datasetSplitter the splitter used to produce each individual split of a split set
	 * @param repeats the number k of independent splits per split set; must be positive
	 * @param random source of randomness; consumed exactly once to derive a fixed base seed
	 */
	public MonteCarloCrossValidationSplitSetGenerator(final IRandomDatasetSplitter<D> datasetSplitter, final int repeats, final Random random) {
		super();
		if (repeats <= 0) { // fail fast before establishing any object state
			throw new IllegalArgumentException("Cannot create MCCV split generator for non-positive number of repeats " + repeats + ". Set a positive number of repeats.");
		}
		this.datasetSplitter = datasetSplitter;
		this.repeats = repeats;
		this.seed = random.nextLong(); // we do not want to use the random object any further, because (i) the randomness inside should not be affected by outer operations and (ii) having a concrete seed augments the reproducibility
	}

	@Override
	public String getLoggerName() {
		return this.logger.getName();
	}

	@Override
	public void setLoggerName(final String name) {
		this.logger.info("Switching logger of {} from {} to {}", this, this.logger.getName(), name);
		this.logger = LoggerFactory.getLogger(name);
		this.logger.info("Switched logger of {} to {}", this, name);
		/* propagate the logger name to the wrapped splitter where it supports logging customization */
		if (this.datasetSplitter instanceof ILoggingCustomizable) {
			((ILoggingCustomizable) this.datasetSplitter).setLoggerName(name + ".splitter");
			this.logger.info("Setting logger of splitter {} to {}.splitter", this.datasetSplitter.getClass().getName(), name);
		} else {
			this.logger.info("Base splitter {} is not configurable for logging, so not configuring it.", this.datasetSplitter.getClass().getName());
		}
	}

	@Override
	public int getNumSplitsPerSet() {
		return this.repeats;
	}

	@Override
	public int getNumFoldsPerSplit() {
		return this.datasetSplitter.getNumberOfFoldsPerSplit();
	}

	/**
	 * Draws the next set of {@code repeats} independent splits of the given dataset.
	 * Each split is produced with a deterministic Random seeded by (base seed + running offset).
	 *
	 * @param data the dataset to split
	 * @return a split set containing {@code repeats} splits
	 * @throws InterruptedException if the executing thread has been interrupted
	 * @throws SplitFailedException if the underlying splitter fails
	 */
	@Override
	public IDatasetSplitSet<D> nextSplitSet(final D data) throws InterruptedException, SplitFailedException {
		this.logger.info("Generating next split set of size {} for dataset with {} instances.", this.repeats, data.size());
		if (Thread.interrupted()) { // clear the interrupted flag; this is a general Java convention when an InterruptedException is thrown (see Java documentation for details)
			this.logger.info("MCCV has been interrupted, leaving MCCV.");
			throw new InterruptedException("MCCV has been interrupted.");
		}
		List<List<D>> splits = new ArrayList<>(this.repeats);
		for (int i = 0; i < this.repeats; i++) {
			long vSeed = this.seed + this.runningSeed;
			this.logger.debug("Invoking dataset splitter {} with Random({})", this.datasetSplitter, vSeed);
			splits.add(this.datasetSplitter.split(data, new Random(vSeed)));
			this.runningSeed++;
		}
		return new DatasetSplitSet<>(splits);
	}

	@Override
	public String toString() {
		return "MonteCarloCrossValidationSplitSetGenerator [datasetSplitter=" + this.datasetSplitter + ", repeats=" + this.repeats + ", seed=" + this.seed + "]";
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/exception/CheckedJaicoreMLException.java
package ai.libs.jaicore.ml.core.exception;

/**
 * The {@link CheckedJaicoreMLException} serves as a base class for all checked {@link Exception}s defined as part of jaicore-ml.
 *
 * @author Alexander Hetzer
 *
 */
public abstract class CheckedJaicoreMLException extends Exception {

	private static final long serialVersionUID = 7366050163157197392L;

	/**
	 * Creates a new {@link CheckedJaicoreMLException} with the given message.
	 *
	 * @param message
	 *            The message of this {@link Exception}.
	 */
	public CheckedJaicoreMLException(final String message) {
		super(message);
	}

	/**
	 * Creates a new {@link CheckedJaicoreMLException} with the given message and cause.
	 *
	 * @param message
	 *            The message of this {@link Exception}.
	 * @param cause
	 *            The underlying cause of this {@link Exception}.
	 */
	public CheckedJaicoreMLException(final String message, final Throwable cause) {
		super(message, cause);
	}

	/**
	 * Creates a new {@link CheckedJaicoreMLException} with the given cause.
	 *
	 * @param cause
	 *            The underlying cause of this {@link Exception}.
	 */
	public CheckedJaicoreMLException(final Throwable cause) {
		super(cause);
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/exception/ConfigurationException.java
package ai.libs.jaicore.ml.core.exception;

/**
 * The {@link ConfigurationException} indicates an error during a configuration process. Details concerning the error can be inferred from the associated message.
 *
 * @author Alexander Hetzer
 *
 */
public class ConfigurationException extends CheckedJaicoreMLException {

	private static final long serialVersionUID = 3979468542526154560L;

	/**
	 * Creates a new {@link ConfigurationException} with the given parameters.
	 *
	 * @param message
	 *            The message of this {@link Exception}.
	 * @param cause
	 *            The underlying cause of this {@link Exception}.
	 */
	public ConfigurationException(final String message, final Throwable cause) { // params made final for consistency with the other exception classes of this package
		super(message, cause);
	}

	/**
	 * Creates a new {@link ConfigurationException} with the given parameters.
	 *
	 * @param message
	 *            The message of this {@link Exception}.
	 */
	public ConfigurationException(final String message) {
		super(message);
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/exception/DatasetCapacityReachedException.java
package ai.libs.jaicore.ml.core.exception;

import ai.libs.jaicore.ml.classification.singlelabel.timeseries.dataset.TimeSeriesDataset;

/**
 * Exception that indicates that the capacity of a {@link TimeSeriesDataset} is
 * reached, i.e. the maximum number of instances is already contained in the
 * dataset.
 */
public class DatasetCapacityReachedException extends CheckedJaicoreMLException {

	private static final long serialVersionUID = 8108652448377411780L;

	/**
	 * Creates a new {@link DatasetCapacityReachedException} with the given
	 * parameters.
	 *
	 * @param message The message of this {@link Exception}.
	 * @param cause The underlying cause of this {@link Exception}.
	 */
	public DatasetCapacityReachedException(final String message, final Throwable cause) { // params made final for consistency with the other exception classes of this package
		super(message, cause);
	}

	/**
	 * Creates a new {@link DatasetCapacityReachedException} with the given
	 * parameters.
	 *
	 * @param message The message of this {@link Exception}.
	 */
	public DatasetCapacityReachedException(final String message) {
		super(message);
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/exception/EvaluationException.java
package ai.libs.jaicore.ml.core.exception;

/**
 * The {@link EvaluationException} indicates that an error occurred during an
 * evaluation process. Details concerning the error can be inferred from the
 * associated message.
 *
 * @author Julian Lienen
 *
 */
public class EvaluationException extends CheckedJaicoreMLException {

	/**
	 * Generated serial version UID.
	 */
	private static final long serialVersionUID = -222252014216889955L;

	/**
	 * Creates a new {@link EvaluationException} with the given parameters.
	 *
	 * @param message
	 *            The message of this {@link Exception}.
	 */
	public EvaluationException(final String message) { // params made final for consistency with the other exception classes of this package
		super(message);
	}

	/**
	 * Creates a new {@link EvaluationException} with the given parameters.
	 *
	 * @param message
	 *            The message of this {@link Exception}.
	 * @param cause
	 *            The underlying cause of this {@link Exception}.
	 */
	public EvaluationException(final String message, final Throwable cause) {
		super(message, cause);
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/exception/InconsistentDataFormatException.java
package ai.libs.jaicore.ml.core.exception;

/**
 * Unchecked exception signalling that data with an inconsistent format was encountered;
 * extends {@link IllegalArgumentException} and mirrors its four standard constructors.
 */
public class InconsistentDataFormatException extends IllegalArgumentException {

	private static final long serialVersionUID = -191627526519227789L;

	/** Creates a new exception without message or cause. */
	public InconsistentDataFormatException() {
		super();
	}

	/**
	 * Creates a new exception with the given cause.
	 *
	 * @param cause the underlying cause
	 */
	public InconsistentDataFormatException(final Throwable cause) {
		super(cause);
	}

	/**
	 * Creates a new exception with the given message.
	 *
	 * @param msg the detail message
	 */
	public InconsistentDataFormatException(final String msg) {
		super(msg);
	}

	/**
	 * Creates a new exception with the given message and cause.
	 *
	 * @param msg the detail message
	 * @param cause the underlying cause
	 */
	public InconsistentDataFormatException(final String msg, final Throwable cause) {
		super(msg, cause);
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/exception/UncheckedJaicoreMLException.java
package ai.libs.jaicore.ml.core.exception;

/**
 * The {@link UncheckedJaicoreMLException} serves as a base class for all unchecked {@link Exception}s defined as part of jaicore-ml.
 *
 * @author Alexander Hetzer
 *
 */
public abstract class UncheckedJaicoreMLException extends RuntimeException {

	private static final long serialVersionUID = 5949039077785112560L;

	/**
	 * Creates a new {@link UncheckedJaicoreMLException} with the given parameters.
	 *
	 * @param message
	 *            The message of this {@link Exception}.
	 * @param cause
	 *            The underlying cause of this {@link Exception}.
	 */
	public UncheckedJaicoreMLException(final String message, final Throwable cause) { // params made final for consistency with CheckedJaicoreMLException
		super(message, cause);
	}

	/**
	 * Creates a new {@link UncheckedJaicoreMLException} with the given parameters.
	 *
	 * @param message
	 *            The message of this {@link Exception}.
	 */
	public UncheckedJaicoreMLException(final String message) {
		super(message);
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/exception/package-info.java
/**
 * This package contains {@link java.lang.Exception}s defined by jaicore-ml. Most importantly, it contains the two (abstract) base classes
 * {@link ai.libs.jaicore.ml.core.exception.CheckedJaicoreMLException} and {@link ai.libs.jaicore.ml.core.exception.UncheckedJaicoreMLException},
 * which can be used to catch all exceptions emitted by classes which are part of jaicore-ml.
 *
 * @since 0.0.1
 *
 */
package ai.libs.jaicore.ml.core.exception;
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/FilterBasedDatasetSplitter.java
package ai.libs.jaicore.ml.core.filter;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.Random;

import org.api4.java.ai.ml.core.dataset.IDataset;
import org.api4.java.ai.ml.core.dataset.splitter.IDatasetSplitter;
import org.api4.java.ai.ml.core.dataset.splitter.IFoldSizeConfigurableRandomDatasetSplitter;
import org.api4.java.ai.ml.core.dataset.splitter.SplitFailedException;
import org.api4.java.ai.ml.core.exception.DatasetCreationException;
import org.api4.java.ai.ml.core.filter.unsupervised.sampling.ISamplingAlgorithm;
import org.api4.java.common.control.ILoggingCustomizable;
import org.api4.java.common.reconstruction.IReconstructible;
import org.api4.java.common.reconstruction.IReconstructionInstruction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ai.libs.jaicore.basic.reconstruction.ReconstructionInstruction;
import ai.libs.jaicore.basic.reconstruction.ReconstructionUtil;
import ai.libs.jaicore.ml.core.dataset.splitter.ReproducibleSplit;
import ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories.interfaces.ISamplingAlgorithmFactory;

/**
 * Splits a dataset into exactly two folds: the first fold is drawn by a sampling algorithm
 * created from the configured factory, and the second fold is the complement of that sample.
 * If the input dataset is reconstructible, reconstruction instructions are attached to the folds.
 */
public class FilterBasedDatasetSplitter<D extends IDataset<?>> implements IDatasetSplitter<D>, IFoldSizeConfigurableRandomDatasetSplitter<D>, ILoggingCustomizable {

	/* Tolerance for checking that two relative fold sizes sum to 1; exact double equality (used previously)
	 * wrongly rejects valid specifications such as 0.7/0.3 whose binary sum is 0.9999999999999999. */
	private static final double FOLD_SIZE_SUM_TOLERANCE = 1E-9;

	private final ISamplingAlgorithmFactory<D, ?> samplerFactory;
	private final double relSampleSize;
	private final Random random;
	private Logger logger = LoggerFactory.getLogger(FilterBasedDatasetSplitter.class);

	/**
	 * Creates a splitter without a configured sample size or random source; only the
	 * {@link #split(IDataset, Random, double...)} overload may be used then.
	 */
	public FilterBasedDatasetSplitter(final ISamplingAlgorithmFactory<D, ?> samplerFactory) {
		this(samplerFactory, Double.NaN, null);
	}

	/**
	 * @param samplerFactory factory producing the sampling algorithm that draws the first fold
	 * @param relSampleSize relative size of the first fold (NaN if unset)
	 * @param random random source for seed derivation (null if unset)
	 */
	public FilterBasedDatasetSplitter(final ISamplingAlgorithmFactory<D, ?> samplerFactory, final double relSampleSize, final Random random) {
		super();
		this.samplerFactory = samplerFactory;
		this.relSampleSize = relSampleSize;
		this.random = random;
	}

	@Override
	public List<D> split(final D data) throws SplitFailedException, InterruptedException {
		if (this.random == null || Double.isNaN(this.relSampleSize)) {
			throw new IllegalStateException("The splitter has not been initialized with a random source and relative sample size configured. Provide these explicitly in the split method or in the initialization.");
		}
		return this.split(data, this.random, this.relSampleSize);
	}

	@Override
	public int getNumberOfFoldsPerSplit() {
		return 2; // this splitter always produces exactly a (sample, complement) pair
	}

	@Override
	public List<D> split(final D data, final Random random, final double... relativeFoldSizes) throws SplitFailedException, InterruptedException {
		return getSplit(data, this.samplerFactory, random.nextLong(), this.logger, relativeFoldSizes);
	}

	/**
	 * List-based convenience overload; dispatches to the varargs variant.
	 *
	 * @throws IllegalArgumentException if no fold size is given (fail fast instead of an opaque index error)
	 */
	public static <D extends IDataset<?>> List<D> getSplit(final D data, final ISamplingAlgorithmFactory<D, ?> samplerFactory, final long seed, final List<Double> relativeFoldSizes) throws InterruptedException, SplitFailedException {
		if (relativeFoldSizes == null || relativeFoldSizes.isEmpty()) {
			throw new IllegalArgumentException("At least one relative fold size must be given.");
		}
		if (relativeFoldSizes.size() > 1) {
			return getSplit(data, samplerFactory, seed, relativeFoldSizes.get(0), relativeFoldSizes.get(1));
		} else {
			return getSplit(data, samplerFactory, seed, relativeFoldSizes.get(0));
		}
	}

	public static <D extends IDataset<?>> List<D> getSplit(final D data, final ISamplingAlgorithmFactory<D, ?> samplerFactory, final long seed, final double... relativeFoldSizes) throws InterruptedException, SplitFailedException {
		return getSplit(data, samplerFactory, seed, LoggerFactory.getLogger(FilterBasedDatasetSplitter.class), relativeFoldSizes);
	}

	/**
	 * Draws a 2-fold split of the dataset: the first fold via a sampling algorithm seeded with {@code seed},
	 * the second fold as the complement of the first.
	 *
	 * @param relativeFoldSizes one size (first fold) or two sizes summing to 1 (within tolerance)
	 * @throws IllegalArgumentException on an empty dataset or invalid fold size specification
	 * @throws SplitFailedException if the sampler fails to create the folds
	 */
	public static <D extends IDataset<?>> List<D> getSplit(final D data, final ISamplingAlgorithmFactory<D, ?> samplerFactory, final long seed, final Logger logger, final double... relativeFoldSizes) throws InterruptedException, SplitFailedException {
		Objects.requireNonNull(data);
		if (data.isEmpty()) {
			throw new IllegalArgumentException("Cannot split empty dataset.");
		}
		/* bug fix: fold sizes were previously compared to 1 with exact double equality; also, a zero-length
		 * specification now fails here instead of with an ArrayIndexOutOfBoundsException below */
		if (relativeFoldSizes.length == 0 || relativeFoldSizes.length > 2 || relativeFoldSizes.length == 2 && Math.abs(relativeFoldSizes[0] + relativeFoldSizes[1] - 1.0) > FOLD_SIZE_SUM_TOLERANCE) {
			throw new IllegalArgumentException("Invalid fold size specification " + Arrays.toString(relativeFoldSizes));
		}
		if (data instanceof IReconstructible && !(samplerFactory instanceof IReconstructible)) {
			throw new IllegalStateException("Given data is reproducible and so should the splitters, but the sampler factory used to create the sampling algorithm is not reproducible.");
		}
		int size = (int) Math.round(data.size() * relativeFoldSizes[0]);
		logger.info("Drawing 2-fold split with size {} for the first fold.", size);
		ISamplingAlgorithm<D> sampler = samplerFactory.getAlgorithm(size, data, new Random(seed));
		if (sampler instanceof ILoggingCustomizable) {
			((ILoggingCustomizable) sampler).setLoggerName(logger.getName() + ".sampler");
		}
		try {
			D firstFold = sampler.nextSample();
			logger.debug("Sample for first fold completed, now computing the complement to fill the second fold.");
			D secondFold = sampler.getComplementOfLastSample();
			logger.info("Fold creation completed. Adding reconstruction information.");
			if (data instanceof IReconstructible) {
				if (!ReconstructionUtil.areInstructionsNonEmptyIfReconstructibilityClaimed(data)) {
					logger.info("Not making the split reproducible since the original data is not reproducible.");
					return Arrays.asList(firstFold, secondFold);
				} else {
					List<Double> portionsAsList = new ArrayList<>();
					for (double d : relativeFoldSizes) {
						portionsAsList.add(d);
					}
					List<IReconstructionInstruction> instructions = new ArrayList<>(((IReconstructible) data).getConstructionPlan().getInstructions()); // we create this copy to be safe of concurrent modifications of the instructions
					instructions.forEach(((IReconstructible) firstFold)::addInstruction);
					ReconstructionInstruction rInstForFirstFold = new ReconstructionInstruction(FilterBasedDatasetSplitter.class.getName(), "getFoldOfSplit", new Class<?>[] { IDataset.class, ISamplingAlgorithmFactory.class, long.class, int.class, List.class }, new Object[] { "this", samplerFactory, seed, 0, portionsAsList });
					((IReconstructible) firstFold).addInstruction(rInstForFirstFold);
					instructions.forEach(((IReconstructible) secondFold)::addInstruction);
					ReconstructionInstruction rInstForSecondFold = new ReconstructionInstruction(FilterBasedDatasetSplitter.class.getName(), "getFoldOfSplit", new Class<?>[] { IDataset.class, ISamplingAlgorithmFactory.class, long.class, int.class, List.class }, new Object[] { "this", samplerFactory, seed, 1, portionsAsList });
					((IReconstructible) secondFold).addInstruction(rInstForSecondFold);
					ReconstructionUtil.requireNonEmptyInstructionsIfReconstructibilityClaimed(firstFold);
					ReconstructionUtil.requireNonEmptyInstructionsIfReconstructibilityClaimed(secondFold);
					/* NOTE(review): Arrays.asList(double[]) yields a single-element List<double[]>, not a List<Double>;
					 * this looks unintended but is kept as-is since changing the recorded instruction arguments could
					 * break existing reconstruction plans — TODO confirm against ReconstructionInstruction semantics. */
					ReconstructionInstruction inst = new ReconstructionInstruction(FilterBasedDatasetSplitter.class.getName(), "getSplit", new Class<?>[] { IDataset.class, ISamplingAlgorithmFactory.class, long.class, List.class }, new Object[] { "this", samplerFactory, seed, Arrays.asList(relativeFoldSizes) });
					logger.info("Sampling-based split completed, returning two folds of sizes {} and {}.", firstFold.size(), secondFold.size());
					return new ReproducibleSplit<>(inst, data, firstFold, secondFold);
				}
			}
			logger.info("Sampling-based split completed, returning two folds of sizes {} and {}.", firstFold.size(), secondFold.size());
			return Arrays.asList(firstFold, secondFold);
		} catch (DatasetCreationException e) {
			throw new SplitFailedException(e);
		}
	}

	/**
	 * Recomputes the split and returns only the requested fold (0 = sample, 1 = complement);
	 * used as a reconstruction entry point.
	 */
	public static <D extends IDataset<?>> D getFoldOfSplit(final D data, final ISamplingAlgorithmFactory<D, ?> samplerFactory, final long seed, final int fold, final List<Double> relativeFoldSizes) throws InterruptedException, SplitFailedException {
		return getSplit(data, samplerFactory, seed, relativeFoldSizes).get(fold);
	}

	@Override
	public String getLoggerName() {
		return this.logger.getName();
	}

	@Override
	public void setLoggerName(final String name) {
		this.logger = LoggerFactory.getLogger(name);
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/IDatasetFilter.java
package ai.libs.jaicore.ml.core.filter;

/**
 * Marker interface for dataset filters; declares no methods of its own.
 */
public interface IDatasetFilter {
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/SplitterUtil.java
package ai.libs.jaicore.ml.core.filter;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Random;

import org.api4.java.ai.ml.core.dataset.splitter.SplitFailedException;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance;
import org.api4.java.ai.ml.core.exception.DatasetCreationException;
import org.api4.java.common.reconstruction.IReconstructible;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ai.libs.jaicore.basic.reconstruction.ReconstructionInstruction;
import ai.libs.jaicore.ml.core.dataset.splitter.ReproducibleSplit;
import ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories.LabelBasedStratifiedSamplingFactory;
import ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories.SimpleRandomSamplingFactory;

/**
 * Static convenience methods for creating train/test splits of labeled datasets,
 * either label-stratified or via simple random sampling, optionally with
 * reconstruction instructions attached for reproducibility.
 */
public class SplitterUtil {

	private SplitterUtil() {
		/* avoids instantiation */
	}

	public static <D extends ILabeledDataset<?>> List<D> getLabelStratifiedTrainTestSplit(final D dataset, final long seed, final double relativeTrainSize) throws SplitFailedException, InterruptedException {
		return getLabelStratifiedTrainTestSplit(dataset, seed, relativeTrainSize, null);
	}

	/**
	 * Creates a label-stratified 2-fold split; if the dataset is reconstructible, the returned
	 * split carries a reconstruction instruction referring back to this method.
	 *
	 * @param loggerName logger name to assign to the splitter (may be null)
	 */
	public static <D extends ILabeledDataset<?>> List<D> getLabelStratifiedTrainTestSplit(final D dataset, final long seed, final double relativeTrainSize, final String loggerName) throws SplitFailedException, InterruptedException {
		Logger logger = LoggerFactory.getLogger(loggerName != null ? loggerName : SplitterUtil.class.getName());
		boolean isReproducible = dataset instanceof IReconstructible;
		logger.info("Creating splitter");
		FilterBasedDatasetSplitter<D> splitter = new FilterBasedDatasetSplitter<>(new LabelBasedStratifiedSamplingFactory<>(), relativeTrainSize, new Random(seed));
		if (loggerName != null) {
			logger.info("Setting loggername of splitter to {}", loggerName);
			splitter.setLoggerName(loggerName);
		}
		List<D> folds = splitter.split(dataset);
		if (!isReproducible) {
			return folds;
		}
		try {
			ReconstructionInstruction instruction = new ReconstructionInstruction(SplitterUtil.class.getMethod("getLabelStratifiedTrainTestSplit", ILabeledDataset.class, long.class, double.class), "this", seed, relativeTrainSize);
			return new ReproducibleSplit<>(instruction, dataset, folds.get(0), folds.get(1)); // the folds themselves should be reconstructible already by the splitter
		} catch (NoSuchMethodException | SecurityException e) {
			throw new SplitFailedException(e);
		}
	}

	public static List<ILabeledDataset<?>> getLabelStratifiedTrainTestSplit(final ILabeledDataset<?> dataset, final Random random, final double relativeTrainSize) throws SplitFailedException, InterruptedException {
		return getLabelStratifiedTrainTestSplit(dataset, random.nextLong(), relativeTrainSize);
	}

	public static ILabeledDataset<?> getTrainFoldOfLabelStratifiedTrainTestSplit(final ILabeledDataset<?> dataset, final Random random, final double relativeTrainSize) throws SplitFailedException, InterruptedException {
		return getLabelStratifiedTrainTestSplit(dataset, random, relativeTrainSize).get(0);
	}

	public static ILabeledDataset<?> getTrainFoldOfLabelStratifiedTrainTestSplit(final ILabeledDataset<?> dataset, final long seed, final double relativeTrainSize) throws SplitFailedException, InterruptedException {
		return getLabelStratifiedTrainTestSplit(dataset, seed, relativeTrainSize).get(0);
	}

	public static ILabeledDataset<?> getTestFoldOfLabelStratifiedTrainTestSplit(final ILabeledDataset<?> dataset, final Random random, final double relativeTrainSize) throws SplitFailedException, InterruptedException {
		return getLabelStratifiedTrainTestSplit(dataset, random, relativeTrainSize).get(1);
	}

	public static ILabeledDataset<?> getTestFoldOfLabelStratifiedTrainTestSplit(final ILabeledDataset<?> dataset, final long seed, final double relativeTrainSize) throws SplitFailedException, InterruptedException {
		return getLabelStratifiedTrainTestSplit(dataset, seed, relativeTrainSize).get(1);
	}

	/**
	 * Creates a simple (non-stratified) random 2-fold split; if the dataset is reconstructible,
	 * reconstruction instructions referring to the *simple* split methods are attached.
	 */
	public static List<ILabeledDataset<?>> getSimpleTrainTestSplit(final ILabeledDataset<?> dataset, final long seed, final double relativeTrainSize) throws SplitFailedException, InterruptedException {
		boolean isReproducible = dataset instanceof IReconstructible;
		List<ILabeledDataset<?>> folds = getSimpleTrainTestSplit(dataset, new Random(seed), relativeTrainSize);
		if (!isReproducible) {
			return folds;
		}
		try {
			IReconstructible rDataset = (IReconstructible) dataset;
			IReconstructible trainFold = ((IReconstructible) folds.get(0));
			IReconstructible testFold = ((IReconstructible) folds.get(1));
			rDataset.getConstructionPlan().getInstructions().forEach(i -> {
				trainFold.addInstruction(i);
				testFold.addInstruction(i);
			});
			/* bug fix: the instructions previously referenced the *label-stratified* split methods
			 * (getTrainFoldOfLabelStratifiedTrainTestSplit etc.), so reconstructing a simple split
			 * would have rebuilt a stratified one; they now reference the simple split methods */
			trainFold.addInstruction(new ReconstructionInstruction(SplitterUtil.class.getMethod("getTrainFoldOfSimpleTrainTestSplit", ILabeledDataset.class, long.class, double.class), "this", seed, relativeTrainSize));
			testFold.addInstruction(new ReconstructionInstruction(SplitterUtil.class.getMethod("getTestFoldOfSimpleTrainTestSplit", ILabeledDataset.class, long.class, double.class), "this", seed, relativeTrainSize));
			ReconstructionInstruction instruction = new ReconstructionInstruction(SplitterUtil.class.getMethod("getSimpleTrainTestSplit", ILabeledDataset.class, long.class, double.class), "this", seed, relativeTrainSize);
			return new ReproducibleSplit<>(instruction, dataset, folds.get(0), folds.get(1));
		} catch (NoSuchMethodException | SecurityException e) {
			throw new SplitFailedException(e);
		}
	}

	public static List<ILabeledDataset<?>> getSimpleTrainTestSplit(final ILabeledDataset<?> dataset, final Random random, final double relativeTrainSize) throws SplitFailedException, InterruptedException {
		return new FilterBasedDatasetSplitter<>(new SimpleRandomSamplingFactory<>(), relativeTrainSize, random).split(dataset);
	}

	public static ILabeledDataset<?> getTrainFoldOfSimpleTrainTestSplit(final ILabeledDataset<?> dataset, final Random random, final double relativeTrainSize) throws SplitFailedException, InterruptedException {
		return getSimpleTrainTestSplit(dataset, random, relativeTrainSize).get(0);
	}

	public static ILabeledDataset<?> getTrainFoldOfSimpleTrainTestSplit(final ILabeledDataset<?> dataset, final long seed, final double relativeTrainSize) throws SplitFailedException, InterruptedException {
		return getSimpleTrainTestSplit(dataset, seed, relativeTrainSize).get(0);
	}

	public static ILabeledDataset<?> getTestFoldOfSimpleTrainTestSplit(final ILabeledDataset<?> dataset, final Random random, final double relativeTrainSize) throws SplitFailedException, InterruptedException {
		return getSimpleTrainTestSplit(dataset, random, relativeTrainSize).get(1);
	}

	public static ILabeledDataset<?> getTestFoldOfSimpleTrainTestSplit(final ILabeledDataset<?> dataset, final long seed, final double relativeTrainSize) throws SplitFailedException, InterruptedException {
		return getSimpleTrainTestSplit(dataset, seed, relativeTrainSize).get(1);
	}

	/**
	 * Materializes a split specification (collections of instance indices) into concrete datasets
	 * by copying the referenced instances into empty copies of the given dataset.
	 */
	public static List<ILabeledDataset<ILabeledInstance>> getRealizationOfSplitSpecification(final ILabeledDataset<? extends ILabeledInstance> dataset, final Collection<? extends Collection<Integer>> splitSpec) throws DatasetCreationException, InterruptedException {
		List<ILabeledDataset<ILabeledInstance>> split = new ArrayList<>(splitSpec.size());
		for (Collection<Integer> fold : splitSpec) {
			ILabeledDataset<ILabeledInstance> foldDataset = (ILabeledDataset<ILabeledInstance>) dataset.createEmptyCopy();
			for (int index : fold) {
				foldDataset.add(dataset.get(index));
			}
			split.add(foldDataset);
		}
		return split;
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/package-info.java
/**
 * This package contains dataset filters, most notably sampling-based dataset splitters
 * (e.g. {@code FilterBasedDatasetSplitter}) and split utility methods ({@code SplitterUtil}).
 *
 * @author mwever
 *
 */
package ai.libs.jaicore.ml.core.filter;
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/IClusterableInstance.java
package ai.libs.jaicore.ml.core.filter.sampling;

import org.apache.commons.math3.ml.clustering.Clusterable;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance;

/**
 * A labeled instance that can also be consumed by Apache Commons Math clustering algorithms;
 * combines {@link ILabeledInstance} with {@link Clusterable} and declares no methods of its own.
 */
public interface IClusterableInstance extends ILabeledInstance, Clusterable {
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/SampleElementAddedEvent.java
package ai.libs.jaicore.ml.core.filter.sampling;

import org.api4.java.algorithm.IAlgorithm;

import ai.libs.jaicore.basic.algorithm.AAlgorithmEvent;

/**
 * Event emitted by a sampling algorithm to signal that an element has been added to the sample.
 */
public class SampleElementAddedEvent extends AAlgorithmEvent {

	/**
	 * @param algorithm the algorithm that emitted this event
	 */
	public SampleElementAddedEvent(final IAlgorithm<?, ?> algorithm) {
		super(algorithm);
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/package-info.java
/**
 * This package contains algorithms for creating samples of a dataset.
 *
 * @author wever
 *
 */
package ai.libs.jaicore.ml.core.filter.sampling;
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/infiles/AFileSamplingAlgorithm.java
package ai.libs.jaicore.ml.core.filter.sampling.infiles;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.time.Instant;
import java.time.temporal.ChronoUnit;

import org.api4.java.algorithm.exceptions.AlgorithmException;
import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException;
import org.api4.java.algorithm.exceptions.AlgorithmTimeoutedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ai.libs.jaicore.basic.algorithm.AAlgorithm;
import ai.libs.jaicore.basic.algorithm.EAlgorithmState;

/**
 * An abstract class for file-based sampling algorithms providing basic
 * functionality of an algorithm: configuration checks, timeout supervision and
 * the driver loop that repeatedly calls {@code nextWithException()} until the
 * sample file has been produced.
 *
 * @author Lukas Brandt
 */
public abstract class AFileSamplingAlgorithm extends AAlgorithm<File, File> {

    private static final Logger LOG = LoggerFactory.getLogger(AFileSamplingAlgorithm.class);

    /** Desired number of datapoints in the sample; must be set before {@link #call()}. */
    protected Integer sampleSize = null;
    /** Path of the file the sample is written to; must be set before {@link #call()}. */
    private String outputFilePath = null;
    /** Writer for the output file, opened by {@link #setOutputFileName(String)}. */
    protected FileWriter outputFileWriter;

    protected AFileSamplingAlgorithm(final File input) {
        super(input);
    }

    public void setSampleSize(final int size) {
        this.sampleSize = size;
    }

    /**
     * Sets the output file path and opens a writer for it.
     *
     * @param outputFilePath Path of the file the sample will be written to.
     * @throws IOException If the output file cannot be opened for writing.
     */
    public void setOutputFileName(final String outputFilePath) throws IOException {
        if (this.outputFileWriter != null) {
            this.outputFileWriter.close(); // avoid leaking a previously opened writer on reconfiguration
        }
        this.outputFilePath = outputFilePath;
        this.outputFileWriter = new FileWriter(outputFilePath);
    }

    /**
     * Runs the sampling algorithm: validates the configuration, writes the ARFF
     * header, then steps the algorithm until completion while supervising the
     * configured timeout.
     *
     * @return The file containing the produced sample.
     * @throws AlgorithmException If the configuration is invalid, an I/O error occurs, or the timeout is exceeded.
     */
    @Override
    public File call() throws InterruptedException, AlgorithmExecutionCanceledException, AlgorithmException, AlgorithmTimeoutedException {
        Instant timeoutTime = null;
        if (this.getTimeout().milliseconds() <= 0) {
            LOG.debug("Invalid or no timeout set. There will be no timeout in this algorithm run");
            timeoutTime = Instant.MAX;
        } else {
            timeoutTime = Instant.now().plus(this.getTimeout().milliseconds(), ChronoUnit.MILLIS);
            if (LOG.isDebugEnabled()) {
                LOG.debug("Set timeout to {}", timeoutTime);
            }
        }
        // Check missing or invalid configuration.
        if (this.outputFilePath == null || this.outputFilePath.length() == 0) {
            throw new AlgorithmException("No output file path specified");
        }
        if (this.sampleSize == null) {
            throw new AlgorithmException("No valid sample size specified");
        }
        File dataset = this.getInput();
        if (dataset == null || !dataset.exists() || !dataset.isFile()) {
            throw new AlgorithmException("No dataset file or an invalid dataset file was given as an input.");
        }
        // Working configuration, so create the actual sample.
        // Write the ARFF header to the output file.
        try {
            this.outputFileWriter.write(ArffUtilities.extractArffHeader(this.getInput()));
        } catch (IOException e) {
            throw new AlgorithmException("Error while writing to given output path.", e);
        }
        // Check if the requested sample size is zero and we can stop directly.
        if (this.sampleSize == 0) {
            LOG.warn("Sample size is 0, so an empty data set is returned!");
            return new File(this.outputFilePath);
        }
        // Start the sampling process otherwise.
        this.setState(EAlgorithmState.CREATED);
        while (this.hasNext()) {
            try {
                this.checkAndConductTermination();
            } catch (AlgorithmTimeoutedException e) {
                this.cleanUp();
                // Preserve the original exception as the cause instead of copying only its message.
                throw new AlgorithmException(e.getMessage(), e);
            }
            if (Instant.now().isAfter(timeoutTime)) {
                LOG.warn("Algorithm is running even though it has been timeouted. Cancelling..");
                this.cancel();
                throw new AlgorithmException("Algorithm is running even though it has been timeouted");
            } else {
                this.nextWithException();
            }
        }
        try {
            this.outputFileWriter.flush();
            this.outputFileWriter.close();
        } catch (IOException e) {
            this.cleanUp();
            throw new AlgorithmException("Could not close File writer for sampling output file", e);
        }
        this.cleanUp();
        return new File(this.outputFilePath);
    }

    /**
     * Implement custom clean up behaviour.
     */
    protected abstract void cleanUp();
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/infiles/ArffUtilities.java
package ai.libs.jaicore.ml.core.filter.sampling.infiles;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;

/**
 * Utility class for handling ARFF dataset files.
 *
 * @author Lukas Brandt
 */
public class ArffUtilities {

    private static final String DATA_TAG = "@data";

    private ArffUtilities() {
        // utility class, not meant to be instantiated
    }

    /**
     * Extract the header of an ARFF file as a string. The returned string
     * contains every non-empty, non-comment line up to and including the
     * "@data" tag, each trimmed and terminated by '\n'.
     *
     * @param file Given ARFF file of which the header shall be extracted.
     * @return Header of the given ARFF file.
     * @throws IOException Could not read from the given file.
     */
    public static String extractArffHeader(final File file) throws IOException {
        StringBuilder header = new StringBuilder();
        // NOTE(review): FileReader uses the platform default charset; assumes ASCII-compatible ARFF files — confirm.
        try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
            String line;
            boolean finishedHeader = false;
            while ((line = bufferedReader.readLine()) != null && !finishedHeader) {
                String trimmed = line.trim(); // trim once instead of repeatedly per check
                if (trimmed.isEmpty() || trimmed.charAt(0) == '%') {
                    continue; // skip blank lines and '%' comment lines
                }
                header.append(trimmed).append("\n");
                if (trimmed.equals(DATA_TAG)) {
                    finishedHeader = true;
                }
            }
        }
        return header.toString();
    }

    /**
     * Counts the amount of datapoint entries in an ARFF file. Blank lines and
     * '%' comment lines are never counted.
     *
     * @param file Given ARFF file where the entries are written in.
     * @param hasHeader If true the count will start after an '@data' annotation,
     *            otherwise it will just count every line, which is not a comment.
     * @return Amount of datapoint entries.
     * @throws IOException Could not read from the given file.
     */
    public static int countDatasetEntries(final File file, final boolean hasHeader) throws IOException {
        int result = 0;
        boolean startCounting = !hasHeader;
        try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                String trimmed = line.trim();
                if (trimmed.isEmpty() || trimmed.charAt(0) == '%') {
                    continue;
                }
                if (startCounting) {
                    result++;
                } else if (trimmed.equals(DATA_TAG)) {
                    startCounting = true;
                }
            }
        }
        return result;
    }

    /**
     * Skips with a given reader all comment lines and the header lines of an ARFF
     * file until the first datapoint is reached. If no "@data" tag exists, the
     * reader is consumed to the end of the stream.
     *
     * @param reader Reader that should be skipped to the data.
     * @throws IOException Reader was not able to read the file.
     */
    public static void skipWithReaderToDatapoints(final BufferedReader reader) throws IOException {
        String line;
        while ((line = reader.readLine()) != null) {
            if (line.trim().equals(DATA_TAG)) {
                return;
            }
        }
    }
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/infiles/DatasetFileSorter.java
package ai.libs.jaicore.ml.core.filter.sampling.infiles;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Comparator;

import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException;
import org.api4.java.common.control.ICancelable;

import ai.libs.jaicore.basic.TempFileHandler;

/**
 * Sorts a Dataset file with a Mergesort. A TempFileHandler can be given or a
 * new one will be created otherwise.
 *
 * @author Lukas Brandt
 */
public class DatasetFileSorter implements ICancelable {

    private File datasetFile;
    private TempFileHandler tempFileHandler;
    private boolean usesOwnTempFileHandler;
    private boolean canceled;

    // Default comparator, which compares the single features lexicographically as strings.
    private Comparator<String> comparator = (s1, s2) -> {
        String[] f1 = s1.trim().split(",");
        String[] f2 = s2.trim().split(",");
        int l = Math.min(f1.length, f2.length);
        for (int i = 0; i < l; i++) {
            int c = f1[i].compareTo(f2[i]);
            if (c != 0) {
                return c;
            }
        }
        return 0;
    };

    public DatasetFileSorter(final File datasetFile, final TempFileHandler tempFileHandler) {
        this.datasetFile = datasetFile;
        if (!datasetFile.exists()) {
            throw new IllegalArgumentException("Cannot sort items of non-existent file " + datasetFile);
        }
        this.tempFileHandler = tempFileHandler;
        this.usesOwnTempFileHandler = false;
    }

    public DatasetFileSorter(final File datasetFile) {
        this(datasetFile, new TempFileHandler());
        this.usesOwnTempFileHandler = true;
    }

    /**
     * @param comparator Custom comparator for the dataset file lines.
     */
    public void setComparator(final Comparator<String> comparator) {
        this.comparator = comparator;
    }

    /**
     * Sorts the datapoints of the dataset file and writes the result (ARFF header
     * plus sorted datapoints) to the given path.
     *
     * @param sortedFilePath Path the sorted dataset is written to.
     * @return The sorted dataset file.
     * @throws IOException If reading or writing any of the involved files fails.
     * @throws InterruptedException If the executing thread is interrupted.
     * @throws AlgorithmExecutionCanceledException If {@link #cancel()} was called.
     */
    public File sort(final String sortedFilePath) throws IOException, InterruptedException, AlgorithmExecutionCanceledException {
        IOException exception;
        try (FileWriter fileWriter = new FileWriter(new File(sortedFilePath)); BufferedReader datasetFileReader = new BufferedReader(new FileReader(this.datasetFile))) {
            // Create a new file for the sorted dataset with the ARFF header.
            fileWriter.write(ArffUtilities.extractArffHeader(this.datasetFile));
            // Create a temp file with all datapoints. The writer is closed via
            // try-with-resources (it was previously only flushed — a resource leak).
            String tempFileUUID = this.tempFileHandler.createTempFile().getName();
            try (FileWriter tempFileWriter = this.tempFileHandler.getFileWriterForTempFile(tempFileUUID)) {
                String dataPointLine;
                boolean datastarted = false;
                while ((dataPointLine = datasetFileReader.readLine()) != null) {
                    String trimmed = dataPointLine.trim();
                    if (trimmed.isEmpty() || trimmed.charAt(0) == '%') {
                        continue;
                    }
                    if (datastarted) {
                        tempFileWriter.write(trimmed + "\n");
                    } else if (trimmed.equals("@data")) {
                        datastarted = true;
                    }
                }
                tempFileWriter.flush();
            }
            // Sort the temp file.
            String sortedFileUUID = this.mergesort(tempFileUUID);
            // Write the sorted lines from the temp file to the output file. The reader
            // is closed via try-with-resources (it was previously never closed).
            try (BufferedReader sortedReader = this.tempFileHandler.getFileReaderForTempFile(sortedFileUUID)) {
                String line;
                while ((line = sortedReader.readLine()) != null) {
                    fileWriter.write(line + "\n");
                }
            }
            fileWriter.flush();
            return new File(sortedFilePath);
        } catch (IOException e) {
            exception = e;
        } finally {
            // Start clean up of the temporary file handler if a new one was used for this sorting.
            if (this.usesOwnTempFileHandler) {
                this.tempFileHandler.close();
            }
        }
        throw exception;
    }

    /**
     * Recursively sorts the temp file with the given UUID and returns the UUID of
     * a temp file containing the sorted lines.
     */
    private String mergesort(final String fileUUID) throws IOException, InterruptedException, AlgorithmExecutionCanceledException {
        int length = ArffUtilities.countDatasetEntries(this.tempFileHandler.getTempFile(fileUUID), false);
        if (length <= 1) {
            return fileUUID;
        } else {
            // Split the existing file into two halfs.
            String leftUUID = this.tempFileHandler.createTempFile().getName();
            String rightUUID = this.tempFileHandler.createTempFile().getName();
            try (FileWriter leftWriter = this.tempFileHandler.getFileWriterForTempFile(leftUUID)) {
                try (FileWriter rightWriter = this.tempFileHandler.getFileWriterForTempFile(rightUUID)) {
                    try (BufferedReader reader = this.tempFileHandler.getFileReaderForTempFile(fileUUID)) {
                        int i = 0;
                        String line;
                        while ((line = reader.readLine()) != null) {
                            if (i % 100 == 0 && Thread.interrupted()) { // check interruption only occasionally for performance
                                throw new InterruptedException();
                            }
                            if (i < (length / 2)) {
                                leftWriter.write(line + "\n");
                            } else {
                                rightWriter.write(line + "\n");
                            }
                            i++;
                        }
                        if (Thread.interrupted()) {
                            throw new InterruptedException();
                        }
                        leftWriter.flush();
                        rightWriter.flush();
                        // Sort the two halfs.
                        String sortedLeftUUID = this.mergesort(leftUUID);
                        String sortedRightUUID = this.mergesort(rightUUID);
                        // Merge the sorted halfs back together and delete the left and right temp files.
                        if (Thread.interrupted()) {
                            throw new InterruptedException();
                        }
                        if (this.canceled) {
                            throw new AlgorithmExecutionCanceledException(0);
                        }
                        String mergedFileUUID = this.merge(sortedLeftUUID, sortedRightUUID);
                        this.tempFileHandler.deleteTempFile(leftUUID);
                        this.tempFileHandler.deleteTempFile(rightUUID);
                        return mergedFileUUID;
                    }
                }
            }
        }
    }

    /**
     * Merges two sorted temp files into a newly created temp file and returns its UUID.
     */
    private String merge(final String leftUUID, final String rightUUID) throws IOException {
        String uuid = this.tempFileHandler.createTempFile().getName();
        try (FileWriter writer = this.tempFileHandler.getFileWriterForTempFile(uuid)) {
            try (BufferedReader leftReader = this.tempFileHandler.getFileReaderForTempFile(leftUUID)) {
                try (BufferedReader rightReader = this.tempFileHandler.getFileReaderForTempFile(rightUUID)) {
                    String leftLine = leftReader.readLine();
                    String rightLine = rightReader.readLine();
                    while (leftLine != null || rightLine != null) {
                        if (leftLine == null) {
                            writer.write(rightLine + "\n");
                            rightLine = rightReader.readLine();
                        } else if (rightLine == null) {
                            writer.write(leftLine + "\n");
                            leftLine = leftReader.readLine();
                        } else {
                            int c = this.comparator.compare(leftLine, rightLine);
                            if (c > 0) {
                                writer.write(rightLine + "\n");
                                rightLine = rightReader.readLine();
                            } else {
                                writer.write(leftLine + "\n");
                                leftLine = leftReader.readLine();
                            }
                        }
                    }
                    writer.flush();
                }
            }
        }
        return uuid;
    }

    @Override
    public void cancel() {
        this.canceled = true;
    }
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/infiles/ReservoirSampling.java
package ai.libs.jaicore.ml.core.filter.sampling.infiles;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.Random;

import org.api4.java.algorithm.events.IAlgorithmEvent;
import org.api4.java.algorithm.exceptions.AlgorithmException;
import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException;

import ai.libs.jaicore.ml.core.filter.sampling.SampleElementAddedEvent;

/**
 * Implementation of the Reservoir Sampling algorithm (comparable to a Simple
 * Random Sampling for streamed data). For a desired sample of size n, the first
 * n elements of the stream are directly selected and the remaining ones will
 * replace these with decreasing probability.
 *
 * @author Lukas Brandt
 */
public class ReservoirSampling extends AFileSamplingAlgorithm {

    private Random random;
    private BufferedReader reader;
    private int datapointAmount;
    // Number of lines consumed from the data section so far.
    // NOTE(review): this counter is incremented for blank/comment lines inside the
    // data section as well, while datapointAmount counts only real datapoints —
    // with interleaved comments the tail of the file may never be considered. Confirm intent.
    private int streamedDatapoints;
    private String[] sampledDatapoints;

    public ReservoirSampling(final Random random, final File input) {
        super(input);
        this.random = random;
    }

    /**
     * Performs one step of the algorithm: initialization in state CREATED, streaming
     * one line (and possibly placing it into the reservoir) in state ACTIVE, and
     * writing the sample to the output file once all lines have been streamed.
     */
    @Override
    public IAlgorithmEvent nextWithException() throws InterruptedException, AlgorithmExecutionCanceledException, AlgorithmException {
        switch (this.getState()) {
        case CREATED:
            // Initialize variables.
            try {
                this.datapointAmount = ArffUtilities.countDatasetEntries(this.getInput(), true);
                this.streamedDatapoints = 0;
                this.sampledDatapoints = new String[this.sampleSize];
                this.reader = new BufferedReader(new FileReader(this.getInput()));
                ArffUtilities.skipWithReaderToDatapoints(this.reader);
                return this.activate();
            } catch (IOException e) {
                throw new AlgorithmException("Was not able to count the datapoints.", e);
            }
        case ACTIVE:
            if (this.streamedDatapoints < this.datapointAmount) {
                try {
                    // Get current datapoint.
                    String datapoint = this.reader.readLine();
                    if (datapoint != null && datapoint.trim().length() > 0 && datapoint.trim().charAt(0) != '%') {
                        if (this.streamedDatapoints < this.sampleSize) {
                            // Take the first n elements directly for the sample.
                            this.sampledDatapoints[this.streamedDatapoints] = datapoint.trim();
                        } else {
                            // Replace elements with decreasing probability. Algorithm R requires the
                            // element with 0-based index i to be kept with probability sampleSize/(i+1),
                            // i.e. a draw from the inclusive range [0, i]. The previous
                            // nextInt(streamedDatapoints) was off by one: for the first element past the
                            // reservoir it ALWAYS returned a value < sampleSize and thus always replaced.
                            int j = this.random.nextInt(this.streamedDatapoints + 1);
                            if (j < this.sampleSize) {
                                this.sampledDatapoints[j] = datapoint.trim();
                            }
                        }
                    }
                    this.streamedDatapoints++;
                    return new SampleElementAddedEvent(this);
                } catch (IOException e) {
                    throw new AlgorithmException("Was not able to read datapoint line from input file", e);
                }
            } else {
                try {
                    this.reader.close();
                    // Write sampled datapoints into output file and terminate.
                    for (int i = 0; i < this.sampledDatapoints.length; i++) {
                        this.outputFileWriter.write(this.sampledDatapoints[i] + "\n");
                    }
                    return this.terminate();
                } catch (IOException e) {
                    throw new AlgorithmException("Was not able to write sampled datapoints into output files.", e);
                }
            }
        case INACTIVE:
            if (this.streamedDatapoints < this.datapointAmount) {
                throw new AlgorithmException("Expected sample size was not reached before termination");
            } else {
                return this.terminate();
            }
        default:
            throw new IllegalStateException("Unknown algorithm state " + this.getState());
        }
    }

    @Override
    protected void cleanUp() {
        /* no clean up activities necessary */
    }
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/infiles/SystematicFileSampling.java
package ai.libs.jaicore.ml.core.filter.sampling.infiles;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

import org.api4.java.algorithm.Timeout;
import org.api4.java.algorithm.events.IAlgorithmEvent;
import org.api4.java.algorithm.exceptions.AlgorithmException;
import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException;
import org.api4.java.algorithm.exceptions.AlgorithmTimeoutedException;

import ai.libs.jaicore.basic.TempFileHandler;
import ai.libs.jaicore.ml.core.filter.sampling.SampleElementAddedEvent;
import ai.libs.jaicore.timing.TimedComputation;

/**
 * File-level implementation of Systematic Sampling: Sort datapoints and pick
 * every k-th datapoint for the sample.
 *
 * @author Lukas Brandt
 */
public class SystematicFileSampling extends AFileSamplingAlgorithm {

    private Random random;
    // Number of lines already consumed from the sorted data section (persists across calls).
    private int index;
    // Number of datapoints already written to the sample.
    private int addedDatapoints;
    private TempFileHandler tempFileHandler;
    private Comparator<String> datapointComparator;
    private BufferedReader sortedDatasetFileReader;
    // Precomputed, ascending list of the datapoint indices to select.
    private List<Integer> indicesForSelection;
    private DatasetFileSorter sorter; // this is an object variable in order to be cancelable
    // NOTE(review): cancel() dereferences this.sorter without a null check; cancelling
    // before the CREATED step has run would throw an NPE — confirm lifecycle guarantees.

    /**
     * Simple constructor that uses the default datapoint comparator.
     *
     * @param random
     *            Random Object for determining the sampling start point.
     */
    public SystematicFileSampling(final Random random, final File input) {
        this(random, null, input);
    }

    /**
     * Constructor for a custom datapoint comparator.
     *
     * @param random
     *            Random Object for determining the sampling start point.
     * @param datapointComparator
     *            Comparator to sort the dataset.
     */
    public SystematicFileSampling(final Random random, final Comparator<String> datapointComparator, final File input) {
        super(input);
        this.random = random;
        this.datapointComparator = datapointComparator;
        this.tempFileHandler = new TempFileHandler();
    }

    /**
     * Performs one step of the algorithm: in state CREATED the dataset is sorted
     * (with a timeout) and the selection indices are precomputed; in state ACTIVE
     * one selected datapoint per call is copied to the output file.
     */
    @Override
    public IAlgorithmEvent nextWithException() throws InterruptedException, AlgorithmExecutionCanceledException, AlgorithmException, AlgorithmTimeoutedException {
        switch (this.getState()) {
        case CREATED:
            // Sort dataset and skip with reader the ARFF header.
            File sortedDatasetFile = null;
            try {
                this.sorter = new DatasetFileSorter(this.getInput(), this.tempFileHandler);
                if (this.datapointComparator != null) {
                    this.sorter.setComparator(this.datapointComparator);
                }
                this.setDeadline();
                long remainingMS = this.getRemainingTimeToDeadline().milliseconds() - this.getTimeoutPrecautionOffset();
                sortedDatasetFile = TimedComputation.compute(() -> this.sorter.sort(this.tempFileHandler.getTempFileDirPath() + File.separator + UUID.randomUUID().toString()),
                        new Timeout(remainingMS, TimeUnit.MILLISECONDS), "No time left");
                sortedDatasetFile.deleteOnExit();
                this.sortedDatasetFileReader = new BufferedReader(new FileReader(sortedDatasetFile));
                ArffUtilities.skipWithReaderToDatapoints(this.sortedDatasetFileReader);
            } catch (IOException | ExecutionException e) {
                // Unwrap a cancellation signalled through the timed computation.
                if (e.getCause() instanceof AlgorithmExecutionCanceledException) {
                    throw (AlgorithmExecutionCanceledException) e.getCause();
                }
                throw new AlgorithmException("Was not able to create a sorted dataset file.", e);
            }
            // Count datapoints in the sorted dataset and initialize variables.
            try {
                this.addedDatapoints = 0;
                this.index = 0;
                int datapointAmount = ArffUtilities.countDatasetEntries(sortedDatasetFile, true);
                this.indicesForSelection = new LinkedList<>();
                // Step width between selected datapoints.
                // NOTE(review): integer division yields k == 0 when sampleSize > datapointAmount,
                // so the same index would be selected repeatedly — confirm intended behavior.
                int k = datapointAmount / this.sampleSize;
                int startIndex = this.random.nextInt(datapointAmount);
                int i = 0;
                while (this.indicesForSelection.size() < this.sampleSize) {
                    if (i % 100 == 0) {
                        this.checkAndConductTermination(); // periodic interruption/cancel check
                    }
                    // Indices wrap around via modulo; ordering is restored by the sort below.
                    int e = (startIndex + k * (i++)) % datapointAmount;
                    this.indicesForSelection.add(e);
                }
                this.indicesForSelection.sort(Integer::compare);
                return this.activate();
            } catch (IOException e) {
                throw new AlgorithmException("Was not able to count the datapoints.", e);
            }
        case ACTIVE:
            // If the sample size is not reached yet, add the next datapoint from the
            // systematic sampling method.
            if (this.addedDatapoints < this.sampleSize) {
                try {
                    if (this.addedDatapoints % 100 == 0) {
                        this.checkAndConductTermination();
                    }
                    // Determine and find the next k-th element.
                    // NOTE(review): after the unconditional readLine below, datapoint holds line
                    // (index-1); the loop condition "index < e" therefore appears to stop one line
                    // before the targeted index e in the general case — suspected off-by-one,
                    // verify against a reference trace before changing.
                    int e = this.indicesForSelection.get(this.addedDatapoints);
                    String datapoint = this.sortedDatasetFileReader.readLine();
                    this.index++;
                    while (this.index < e) {
                        if (this.index % 100 == 0) {
                            this.checkAndConductTermination();
                        }
                        datapoint = this.sortedDatasetFileReader.readLine();
                        this.index++;
                    }
                    // Add this datapoint to the output file.
                    assert datapoint != null;
                    this.outputFileWriter.write(datapoint + "\n");
                    this.addedDatapoints++;
                    return new SampleElementAddedEvent(this);
                } catch (IOException e) {
                    throw new AlgorithmException("Was not able to read from sorted dataset file.", e);
                }
            } else {
                // Delete sorted dataset file and terminate
                try {
                    this.sortedDatasetFileReader.close();
                } catch (IOException e) {
                    throw new AlgorithmException("Could not closed dataset file reader", e);
                }
                this.cleanUp();
                return this.terminate();
            }
        case INACTIVE:
            this.cleanUp();
            if (this.addedDatapoints < this.sampleSize) {
                throw new AlgorithmException("Expected sample size was not reached before termination");
            } else {
                return this.terminate();
            }
        default:
            this.cleanUp();
            throw new IllegalStateException("Unknown algorithm state " + this.getState());
        }
    }

    @Override
    public void cancel() {
        super.cancel();
        this.sorter.cancel();
    }

    @Override
    protected void cleanUp() {
        this.tempFileHandler.cleanUp();
    }
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/infiles/stratified
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/infiles/stratified/sampling/ClassStratiFileAssigner.java
package ai.libs.jaicore.ml.core.filter.sampling.infiles.stratified.sampling;

import java.io.FileWriter;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.api4.java.algorithm.exceptions.AlgorithmException;

import ai.libs.jaicore.basic.TempFileHandler;

/**
 * Strati file assigner that maps each datapoint to a stratum based on its class
 * value: every distinct value of the target attribute gets its own stratum file.
 */
public class ClassStratiFileAssigner implements IStratiFileAssigner {

    private TempFileHandler tempFileHandler;
    private String arffHeader;
    // Index of the class column; -1 means "use the last column".
    private int targetAttribute;
    // Maps a class value to the UUID of its stratum temp file.
    private Map<String, String> classToStratumMapping;
    // Maps a stratum file UUID to the number of datapoints written into it.
    private Map<String, Integer> stratiSizes;

    /**
     * Constructor with a given target attribute.
     *
     * @param targetAttribute Index of the target attribute.
     */
    public ClassStratiFileAssigner(final int targetAttribute) {
        this.targetAttribute = targetAttribute;
        this.classToStratumMapping = new HashMap<>();
        this.stratiSizes = new HashMap<>();
    }

    /**
     * Constructor without a given target attribute. The last feature will be used.
     */
    public ClassStratiFileAssigner() {
        this(-1); // delegate instead of duplicating the field initialization
    }

    @Override
    public void setTempFileHandler(final TempFileHandler tempFileHandler) {
        this.tempFileHandler = tempFileHandler;
    }

    @Override
    public void setArffHeader(final String arffHeader) {
        this.arffHeader = arffHeader;
    }

    /**
     * Writes the datapoint into the stratum file of its class value, creating a
     * new stratum file for a class value seen for the first time.
     *
     * @param datapoint Comma-separated ARFF data line.
     * @throws AlgorithmException If a stratum file cannot be created or written.
     */
    @Override
    public void assignDatapoint(final String datapoint) throws AlgorithmException {
        String[] features = datapoint.split(",");
        // Use the configured attribute, or the last column if none was configured.
        String c = this.targetAttribute == -1 ? features[features.length - 1] : features[this.targetAttribute];
        String uuid;
        if (!this.classToStratumMapping.containsKey(c)) {
            try {
                uuid = this.createNewStratumFile(c);
            } catch (IOException e) {
                throw new AlgorithmException("Was not able to create a new temporary file for a stratum.", e);
            }
        } else {
            uuid = this.classToStratumMapping.get(c);
        }
        try {
            // Look the writer up once; writing and flushing through two separately
            // obtained writer instances could otherwise lose buffered data
            // (NOTE(review): assumes the handler may not cache writers — confirm).
            FileWriter writer = this.tempFileHandler.getFileWriterForTempFile(uuid);
            writer.write(datapoint + "\n");
            writer.flush();
            this.stratiSizes.merge(uuid, 1, Integer::sum);
        } catch (IOException e) {
            throw new AlgorithmException("Was not able to write the datapoint into the corresponding stratum file.", e);
        }
    }

    @Override
    public Map<String, Integer> getAllCreatedStrati() {
        return this.stratiSizes;
    }

    /**
     * Creates a new stratum temp file for the given class value, writes the ARFF
     * header into it and registers it in the bookkeeping maps.
     *
     * @return UUID of the newly created stratum file.
     */
    private String createNewStratumFile(final String c) throws IOException {
        String uuid = this.tempFileHandler.createTempFile().getName();
        FileWriter writer = this.tempFileHandler.getFileWriterForTempFile(uuid);
        writer.write(this.arffHeader);
        writer.flush();
        this.classToStratumMapping.put(c, uuid);
        this.stratiSizes.put(uuid, 0);
        return uuid;
    }
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/infiles/stratified
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/infiles/stratified/sampling/IStratiFileAssigner.java
package ai.libs.jaicore.ml.core.filter.sampling.infiles.stratified.sampling;

import java.util.Map;

import org.api4.java.algorithm.exceptions.AlgorithmException;

import ai.libs.jaicore.basic.TempFileHandler;

/**
 * Interface to implement custom stratum assignment behavior. A temporary file
 * for each stratum has to be created and the corresponding datapoints written
 * into it. Each temporary file has to be a valid subset of the input file, i.e.
 * the ARFF header has to be written at the top of the file.
 *
 * @author Lukas Brandt
 */
public interface IStratiFileAssigner {

    /**
     * Set the temporary file handler, which will be used to manage the temporary
     * files for the strati.
     *
     * @param tempFileHandler Temporary File Handler to manage the files.
     */
    public void setTempFileHandler(TempFileHandler tempFileHandler);

    /**
     * Set the header of the original ARFF input file. It has to be written on top
     * of each temporary file. Besides of that it can be used to extract meta data
     * about the dataset if needed.
     *
     * @param arffHeader ARFF header lines as a string.
     */
    public void setArffHeader(String arffHeader);

    /**
     * Select the suitable stratum for a datapoint and write it into the
     * corresponding temporary file.
     *
     * @param datapoint String representation of the datapoint taken from the input
     *            file.
     * @throws AlgorithmException The datapoint representation was invalid or it
     *             could not be assigned to a stratum.
     */
    public void assignDatapoint(String datapoint) throws AlgorithmException;

    /**
     * Get the used strati temporary files and the amount of datapoints inside of
     * it.
     *
     * @return Mapping from UUID of the temporary file of a stratum to the number of
     *         datapoints written into it.
     */
    public Map<String, Integer> getAllCreatedStrati();
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/infiles/stratified
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/infiles/stratified/sampling/StratifiedFileSampling.java
package ai.libs.jaicore.ml.core.filter.sampling.infiles.stratified.sampling;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.api4.java.algorithm.events.IAlgorithmEvent;
import org.api4.java.algorithm.exceptions.AlgorithmException;
import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException;
import org.api4.java.algorithm.exceptions.AlgorithmTimeoutedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ai.libs.jaicore.basic.TempFileHandler;
import ai.libs.jaicore.ml.core.filter.sampling.SampleElementAddedEvent;
import ai.libs.jaicore.ml.core.filter.sampling.infiles.AFileSamplingAlgorithm;
import ai.libs.jaicore.ml.core.filter.sampling.infiles.ArffUtilities;
import ai.libs.jaicore.ml.core.filter.sampling.infiles.ReservoirSampling;
import ai.libs.jaicore.ml.core.filter.sampling.inmemory.WaitForSamplingStepEvent;

/**
 * File-based stratified sampling. The algorithm runs as a step-wise state machine:
 * (1) stream every datapoint of the ARFF input file to the {@link IStratiFileAssigner},
 * which distributes them over per-stratum temporary files; (2) run one
 * {@link ReservoirSampling} thread per stratum (sizes proportional to stratum size);
 * (3) concatenate the per-stratum samples into the output file.
 */
public class StratifiedFileSampling extends AFileSamplingAlgorithm {

	private Logger logger = LoggerFactory.getLogger(StratifiedFileSampling.class);
	private Random random;
	private TempFileHandler tempFileHandler; // owns all per-stratum temporary files; cleaned up in cleanUp()
	private BufferedReader reader; // reader over the ARFF input; NOTE(review): FileReader uses the platform charset — confirm inputs are always in that encoding
	private IStratiFileAssigner assigner;
	private int datapointAmount; // total number of datapoints in the input file
	private int streamedDatapoints; // how many input lines have been handed to the assigner so far
	private boolean stratiSamplingStarted; // phase flag: reservoir-sampling threads have been launched
	private boolean stratiSamplingFinished; // phase flag: all reservoir-sampling threads have terminated
	private ExecutorService executorService; // runs one reservoir-sampling task per stratum
	private List<String> sample; // collected sample lines; guarded by synchronized(this.sample) in the worker threads

	/**
	 * Constructor for a Stratified File Sampler.
	 *
	 * @param random Random object for sampling inside of the strati.
	 * @param stratiFileAssigner Assigner for datapoints to strati.
	 */
	public StratifiedFileSampling(final Random random, final IStratiFileAssigner stratiFileAssigner, final File input) {
		super(input);
		this.random = random;
		this.assigner = stratiFileAssigner;
		this.tempFileHandler = new TempFileHandler();
	}

	/**
	 * Executes one step of the state machine described on the class.
	 *
	 * @return The event describing what happened in this step.
	 * @throws AlgorithmException If reading/writing files or assigning datapoints fails.
	 */
	@Override
	public IAlgorithmEvent nextWithException() throws InterruptedException, AlgorithmExecutionCanceledException, AlgorithmException, AlgorithmTimeoutedException {
		switch (this.getState()) {
		case CREATED:
			// Initialize variables.
			try {
				this.assigner.setArffHeader(ArffUtilities.extractArffHeader(this.getInput()));
				this.assigner.setTempFileHandler(this.tempFileHandler);
				this.datapointAmount = ArffUtilities.countDatasetEntries(this.getInput(), true);
				this.streamedDatapoints = 0;
				this.stratiSamplingStarted = false;
				this.stratiSamplingFinished = false;
				this.sample = new LinkedList<>();
				this.reader = new BufferedReader(new FileReader(this.getInput()));
				this.executorService = Executors.newCachedThreadPool();
				ArffUtilities.skipWithReaderToDatapoints(this.reader); // position the reader behind the ARFF header
				return this.activate();
			} catch (IOException e) {
				throw new AlgorithmException("Was not able to count the datapoints.", e);
			}
		case ACTIVE:
			// Only poll for cancellation/timeout every 100 datapoints to keep the overhead low.
			if (this.streamedDatapoints % 100 == 0) {
				this.checkAndConductTermination();
			}
			if (this.streamedDatapoints < this.datapointAmount) {
				try {
					// Assign each datapoint to a stratum (blank lines and '%' comment lines are skipped,
					// but still counted so that streamedDatapoints stays in sync with the line position).
					String datapoint = this.reader.readLine();
					if (datapoint != null && datapoint.trim().length() > 0 && datapoint.trim().charAt(0) != '%') {
						this.assigner.assignDatapoint(datapoint);
					}
					this.streamedDatapoints++;
					return new SampleElementAddedEvent(this);
				} catch (IOException e) {
					throw new AlgorithmException("Was not able to read datapoint line form input file", e);
				}
			} else {
				this.logger.debug("All datapoints are assigned, now sampling from strati.");
				// NOTE(review): this close() is re-executed on every subsequent step; BufferedReader.close()
				// is idempotent, so this is harmless but could be guarded.
				try {
					this.reader.close();
				} catch (IOException e) {
					throw new AlgorithmException("Was not able to close input file reader.", e);
				}
				if (!this.stratiSamplingStarted) {
					// Start Reservoir Sampling inside the strati.
					this.stratiSamplingStarted = true;
					this.startReservoirSamplingForStrati(this.assigner.getAllCreatedStrati());
					return new WaitForSamplingStepEvent(this);
				} else {
					if (!this.stratiSamplingFinished) {
						// Check if all threads for sampling inside the strati are finished. If no, wait
						// shortly in this step.
						if (this.executorService.isTerminated()) {
							this.stratiSamplingFinished = true;
						} else {
							Thread.sleep(100);
						}
						return new WaitForSamplingStepEvent(this);
					} else {
						// Write strati sampling results to the output and terminate.
						try {
							if (this.sample.size() != this.sampleSize) {
								throw new IllegalStateException("Will write " + this.sample.size() + " instead of " + this.sampleSize + " instances.");
							}
							for (int i = 0; i < this.sample.size(); i++) {
								if (i % 100 == 0) {
									this.checkAndConductTermination();
								}
								this.outputFileWriter.write(this.sample.get(i) + "\n");
							}
							return this.terminate();
						} catch (IOException e) {
							throw new AlgorithmException("Was not able to write datapoint into output file.", e);
						}
					}
				}
			}
		case INACTIVE:
			if (this.streamedDatapoints < this.datapointAmount || !this.stratiSamplingStarted || !this.stratiSamplingFinished) {
				throw new AlgorithmException("Expected sample size was not reached before termination");
			} else {
				return this.terminate();
			}
		default:
			this.cleanUp();
			throw new IllegalStateException("Unknown algorithm state " + this.getState());
		}
	}

	/** Stops all running reservoir-sampling threads and removes the temporary stratum files. */
	@Override
	protected void cleanUp() {
		this.executorService.shutdownNow();
		this.tempFileHandler.cleanUp();
	}

	/**
	 * Calculates the necessary sample sizes and start a Simple Random Sampling
	 * Thread for each stratum.
	 *
	 * @param strati Mapping from stratum temp-file name to the number of datapoints in it.
	 */
	private void startReservoirSamplingForStrati(final Map<String, Integer> strati) {
		this.logger.info("Start reservoir sampling for strati.");
		// Calculate the amount of datapoints that will be used from each strati
		int[] sampleSizeForStrati = new int[strati.keySet().size()];
		// Calculate for each stratum the sample size by StratiSize / DatasetSize
		int i = 0;
		int numOfSamplesThatWillBeCreated = 0;
		List<Integer> fillupStrati = new ArrayList<>(); // strati to fill up rounding instances
		for (Entry<String, Integer> entry : strati.entrySet()) {
			// proportional allocation, rounded down; the rounding losses are distributed below
			sampleSizeForStrati[i] = (int) Math.floor((float) (this.sampleSize * ((double) strati.get(entry.getKey()) / (double) this.datapointAmount)));
			numOfSamplesThatWillBeCreated += sampleSizeForStrati[i];
			fillupStrati.add(i);
			i++;
		}
		// Distribute the instances lost to floor() over randomly chosen strata (each at most once).
		while (numOfSamplesThatWillBeCreated < this.sampleSize) {
			Collections.shuffle(fillupStrati, this.random);
			int indexForNextFillUp = fillupStrati.remove(0);
			sampleSizeForStrati[indexForNextFillUp]++;
			numOfSamplesThatWillBeCreated++;
		}
		if (numOfSamplesThatWillBeCreated != this.sampleSize) {
			throw new IllegalStateException("The strati sum up to a size of " + numOfSamplesThatWillBeCreated + " instead of " + this.sampleSize + ".");
		}
		// Start a Reservoir Sampling thread for each stratum
		i = 0;
		for (Entry<String, Integer> entry : strati.entrySet()) {
			final int index = i;
			this.executorService.execute(() -> {
				String outputFile = this.tempFileHandler.createTempFile().getName();
				ReservoirSampling reservoirSampling = new ReservoirSampling(this.random, this.tempFileHandler.getTempFile(entry.getKey()));
				reservoirSampling.setSampleSize(sampleSizeForStrati[index]);
				try {
					reservoirSampling.setOutputFileName(this.tempFileHandler.getTempFile(outputFile).getAbsolutePath());
					reservoirSampling.call();
					// Copy the sampled lines (skipping blanks and '%' comments) into the shared sample list.
					BufferedReader bufferedReader = this.tempFileHandler.getFileReaderForTempFile(outputFile);
					ArffUtilities.skipWithReaderToDatapoints(bufferedReader);
					String line;
					while ((line = bufferedReader.readLine()) != null) {
						if (!(line.trim().equals("") || line.trim().charAt(0) == '%')) {
							synchronized (this.sample) {
								this.sample.add(line);
							}
						}
					}
				} catch (Exception e) {
					// NOTE(review): broad catch keeps one failing stratum thread from killing the executor,
					// but the main state machine will then fail the size check when writing the output.
					this.logger.error("Unexpected exception during reservoir sampling!", e);
					if (e instanceof InterruptedException) {
						Thread.currentThread().interrupt();
					}
				}
			});
			i++;
		}
		// Prevent executor service from more threads being added; isTerminated() above
		// only turns true after shutdown() once all submitted tasks completed.
		this.executorService.shutdown();
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/ASamplingAlgorithm.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory;

import org.api4.java.ai.ml.core.dataset.IDataset;
import org.api4.java.ai.ml.core.dataset.IInstance;
import org.api4.java.ai.ml.core.exception.DatasetCreationException;
import org.api4.java.ai.ml.core.filter.unsupervised.sampling.ISamplingAlgorithm;
import org.api4.java.algorithm.events.IAlgorithmEvent;
import org.api4.java.algorithm.exceptions.AlgorithmException;
import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException;
import org.api4.java.algorithm.exceptions.AlgorithmTimeoutedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ai.libs.jaicore.basic.algorithm.AAlgorithm;
import ai.libs.jaicore.basic.algorithm.EAlgorithmState;

/**
 * An abstract class for sampling algorithms providing basic functionality of an
 * algorithm.
 *
 * @author wever
 * @author Lukas Brandt
 * @author Felix Weiland
 * @author jnowack
 */
public abstract class ASamplingAlgorithm<D extends IDataset<?>> extends AAlgorithm<D, D> implements ISamplingAlgorithm<D> {

	private Logger logger = LoggerFactory.getLogger(ASamplingAlgorithm.class);

	protected int sampleSize = -1; // -1 means "not configured"; computeSample() rejects this
	protected D sample = null; // filled by the concrete subclass while the step loop runs

	/**
	 * Helper that captures the instance type parameter I, which the outer class
	 * does not carry; this makes the casts between D and IDataset&lt;I&gt; checkable in one place.
	 */
	private class Caps<I extends IInstance> {
		private IDataset<I> cloneOfOriginal; // this is also a D (so casts are admissible)
		private IDataset<I> dataForSample; // this is also a D (so casts are admissible)

		public Caps(final IDataset<I> clone) {
			super();
			this.cloneOfOriginal = clone;
		}

		/**
		 * Validates the configuration, handles the trivial cases (size 0 or size == dataset size)
		 * directly, and otherwise drives the subclass step loop until it terminates.
		 */
		private void computeSample() throws AlgorithmException, AlgorithmTimeoutedException, InterruptedException, AlgorithmExecutionCanceledException {
			// Check missing or invalid configuration.
			if (ASamplingAlgorithm.this.sampleSize == -1) {
				throw new AlgorithmException("No valid sample size specified");
			}
			if (ASamplingAlgorithm.this.sampleSize == 0) {
				ASamplingAlgorithm.this.logger.warn("Sample size is 0, so an empty data set is returned!");
				try {
					this.dataForSample = (IDataset<I>) ASamplingAlgorithm.this.getInput().createEmptyCopy();
					return;
				} catch (DatasetCreationException e) {
					throw new AlgorithmException("Could not create a copy of the dataset.", e);
				}
			}
			D dataset = ASamplingAlgorithm.this.getInput();
			if (dataset == null || dataset.isEmpty()) {
				throw new AlgorithmException("No dataset or an empty dataset was given as an input.");
			}
			if (dataset.size() < ASamplingAlgorithm.this.sampleSize) {
				throw new AlgorithmException("Specified sample size is bigger than the dataset. Sample should have size " + ASamplingAlgorithm.this.sampleSize + " but has " + dataset.size());
			} else if (dataset.size() == ASamplingAlgorithm.this.sampleSize) {
				ASamplingAlgorithm.this.logger.warn("Sample size and data set size are equal. Returning the original data set");
				// The dataset size is exactly the specified sample size, so just return the whole dataset.
				this.dataForSample = (IDataset<I>) dataset;
			} else {
				// Working configuration, so create the actual sample.
				// Reset the state machine and the deadline so that call() can be invoked on a fresh algorithm.
				ASamplingAlgorithm.this.setState(EAlgorithmState.CREATED);
				ASamplingAlgorithm.this.setDeadline();
				ASamplingAlgorithm.this.logger.info("Now running actual sample routine. Timeout is {}. Remaining time: {}", ASamplingAlgorithm.this.getTimeout(), ASamplingAlgorithm.this.getRemainingTimeToDeadline());
				int i = 0;
				while (ASamplingAlgorithm.this.hasNext()) {
					if (i++ % 100 == 0) { // poll for timeout/cancellation only every 100 steps
						ASamplingAlgorithm.this.checkAndConductTermination();
					}
					ASamplingAlgorithm.this.nextWithException();
				}
				this.dataForSample = (IDataset<I>) ASamplingAlgorithm.this.sample;
			}
		}

		public IDataset<I> getComplement() throws DatasetCreationException, InterruptedException {
			return new SampleComplementComputer().getComplement(this.cloneOfOriginal, this.dataForSample);
		}
	}

	private final Caps<?> caps;

	protected ASamplingAlgorithm(final D input) {
		this(input, (Class<? extends IInstance>) input.get(0).getClass());
	}

	protected <I extends IInstance> ASamplingAlgorithm(final D input, final Class<I> instanceClass) {
		super(input);
		IDataset<I> dsCopy = (IDataset<I>) input;
		if (!instanceClass.isInstance(input.get(0))) {
			throw new IllegalArgumentException("The class " + instanceClass.getName() + " is not a valid cast for the given dataset.");
		}
		this.caps = new Caps<>(dsCopy);
	}

	public void setSampleSize(final int size) {
		this.sampleSize = size;
	}

	/**
	 * Sets the sample size as a fraction of the input dataset size.
	 *
	 * @param relativeSize Fraction in (0, 1), both bounds exclusive.
	 * @throws IllegalArgumentException If the fraction is outside (0, 1).
	 */
	public void setSampleSize(final double relativeSize) {
		if (relativeSize <= 0 || relativeSize >= 1) {
			throw new IllegalArgumentException("Illegal relative sample size " + relativeSize + ". Must be between 0 and 1 (both exclusive).");
		}
		this.setSampleSize((int)Math.round(this.getInput().size() * relativeSize));
	}

	@SuppressWarnings("unchecked")
	@Override
	public D call() throws InterruptedException, AlgorithmExecutionCanceledException, AlgorithmException, AlgorithmTimeoutedException {
		this.caps.computeSample();
		return (D) this.caps.dataForSample;
	}

	/**
	 * Step executed by subclasses in the INACTIVE state: either the sample is complete
	 * and the algorithm terminates, or termination happened prematurely and an exception is raised.
	 */
	protected IAlgorithmEvent doInactiveStep() throws AlgorithmException {
		if (this.sample.size() < this.sampleSize) {
			throw new AlgorithmException("Expected sample size was not reached before termination. Current sample size is " + this.sample.size());
		} else {
			return this.terminate();
		}
	}

	@Override
	public D nextSample() throws InterruptedException, DatasetCreationException {
		try {
			return this.call();
		} catch (AlgorithmTimeoutedException | AlgorithmExecutionCanceledException | AlgorithmException e) {
			throw new DatasetCreationException(e);
		}
	}

	/**
	 * Gets the data points contained in the original data that are not part of the
	 * last computed sample (respecting multiplicities).
	 *
	 * @return The complement of the last sample with respect to the original data.
	 * @throws DatasetCreationException If the complement dataset cannot be created.
	 * @throws InterruptedException If the thread is interrupted during the computation.
	 */
	@Override
	public D getComplementOfLastSample() throws DatasetCreationException, InterruptedException {
		return (D) this.caps.getComplement();
	}

	public int getSampleSize() {
		return this.sampleSize;
	}

	@Override
	public void setLoggerName(final String loggerName) {
		this.logger = LoggerFactory.getLogger(loggerName);
	}

	@Override
	public String getLoggerName() {
		return this.logger.getName();
	}

	protected Logger getLogger() {
		return this.logger;
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/ClusterSampling.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory;

import java.util.List;

import org.apache.commons.math3.ml.clustering.CentroidCluster;
import org.apache.commons.math3.ml.clustering.Clusterable;
import org.apache.commons.math3.ml.distance.DistanceMeasure;
import org.apache.commons.math3.ml.distance.ManhattanDistance;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance;
import org.api4.java.algorithm.events.IAlgorithmEvent;
import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException;
import org.api4.java.algorithm.exceptions.AlgorithmTimeoutedException;

import ai.libs.jaicore.ml.core.filter.sampling.SampleElementAddedEvent;

/**
 * Base class for cluster-based sampling. Subclasses compute {@link #clusterResults};
 * each call to {@link #doAlgorithmStep()} then consumes one cluster: if all of its
 * points carry the same label, only the point closest to the centroid enters the
 * sample, otherwise the entire cluster is added.
 */
public abstract class ClusterSampling<I extends ILabeledInstance & Clusterable, D extends ILabeledDataset<I>> extends ASamplingAlgorithm<D> {

	protected List<CentroidCluster<I>> clusterResults = null;
	protected int currentCluster = 0;
	protected DistanceMeasure distanceMeassure = new ManhattanDistance();
	protected long seed;

	protected ClusterSampling(final long seed, final D input) {
		super(input);
		this.seed = seed;
	}

	protected ClusterSampling(final long seed, final DistanceMeasure dist, final D input) {
		super(input);
		this.seed = seed;
		this.distanceMeassure = dist;
	}

	public List<CentroidCluster<I>> getClusterResults() {
		return this.clusterResults;
	}

	public void setClusterResults(final List<CentroidCluster<I>> clusterResults) {
		this.clusterResults = clusterResults;
	}

	public void setDistanceMeassure(final DistanceMeasure distanceMeassure) {
		this.distanceMeassure = distanceMeassure;
	}

	/**
	 * Processes the next cluster as described on the class, or terminates once all
	 * clusters have been consumed.
	 *
	 * @return A {@link SampleElementAddedEvent} while clusters remain, otherwise the termination event.
	 */
	public IAlgorithmEvent doAlgorithmStep() throws AlgorithmTimeoutedException, InterruptedException, AlgorithmExecutionCanceledException {
		if (this.currentCluster >= this.clusterResults.size()) {
			return this.terminate();
		}
		CentroidCluster<I> nextCluster = this.clusterResults.get(this.currentCluster++);
		if (this.hasHomogeneousLabels(nextCluster.getPoints())) {
			this.sample.add(this.pointClosestToCenter(nextCluster));
		} else {
			// find a solution to not sample all points here
			for (I point : nextCluster.getPoints()) {
				this.sample.add(point);
			}
		}
		return new SampleElementAddedEvent(this);
	}

	/** Checks whether all points of the cluster share the same label, polling for termination every 1000 comparisons. */
	private boolean hasHomogeneousLabels(final List<I> points) throws AlgorithmTimeoutedException, InterruptedException, AlgorithmExecutionCanceledException {
		int numPoints = points.size();
		for (int idx = 1; idx < numPoints; idx++) {
			if (idx % 1000 == 0) {
				this.checkAndConductTermination();
			}
			if (!points.get(idx - 1).getLabel().equals(points.get(idx).getLabel())) {
				return false;
			}
		}
		return true;
	}

	/** Returns the point of the cluster with minimum distance to the cluster center. */
	private I pointClosestToCenter(final CentroidCluster<I> cluster) {
		I closest = cluster.getPoints().get(0);
		double smallestDistance = Double.MAX_VALUE;
		for (I candidate : cluster.getPoints()) {
			double candidateDistance = this.distanceMeassure.compute(candidate.getPoint(), cluster.getCenter().getPoint());
			if (candidateDistance < smallestDistance) {
				closest = candidate;
				smallestDistance = candidateDistance;
			}
		}
		return closest;
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/GmeansSampling.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory; import org.apache.commons.math3.ml.distance.DistanceMeasure; import org.apache.commons.math3.ml.distance.ManhattanDistance; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset; import org.api4.java.ai.ml.core.exception.DatasetCreationException; import org.api4.java.algorithm.events.IAlgorithmEvent; import org.api4.java.algorithm.exceptions.AlgorithmException; import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException; import org.api4.java.algorithm.exceptions.AlgorithmTimeoutedException; import org.slf4j.Logger; import ai.libs.jaicore.ml.clustering.learner.GMeans; import ai.libs.jaicore.ml.core.filter.sampling.IClusterableInstance; /** * Implementation of a sampling method using gmeans-clustering. This algorithm * produces clusters of the given points and checks weather all points in a * cluster have the same target Attribute. If yes only the point nearest to the * center is added, otherwise the whole cluster is added to the sample. * <p> * Caution: This does ignore the given sample size! 
* * @author jnowack * */ public class GmeansSampling<I extends IClusterableInstance, D extends ILabeledDataset<I>> extends ClusterSampling<I, D> { private final int maxIterationsInInnerLoop; public GmeansSampling(final int maxIterationsInInnerLoop, final long seed, final DistanceMeasure dist, final D input) { super(seed, dist, input); if (input.size() > 1000) { throw new IllegalArgumentException("GMeans does not support datasets with more than 1000 points, because it has quadratic (non-interruptible) runtime."); } this.maxIterationsInInnerLoop = maxIterationsInInnerLoop; } public GmeansSampling(final long seed, final DistanceMeasure dist, final D input) { this(-1, seed, dist, input); } public GmeansSampling(final int maxIterationsInInnerLoop, final long seed, final D input) { this(maxIterationsInInnerLoop, seed, new ManhattanDistance(), input); } public GmeansSampling(final long seed, final D input) { this(-1, seed, input); } @SuppressWarnings("unchecked") @Override public IAlgorithmEvent nextWithException() throws AlgorithmException, InterruptedException, AlgorithmTimeoutedException, AlgorithmExecutionCanceledException { Logger logger = this.getLogger(); switch (this.getState()) { case CREATED: // Initialize variables try { this.sample = (D) this.getInput().createEmptyCopy(); } catch (DatasetCreationException e) { throw new AlgorithmException("Could not create a copy of the dataset.", e); } if (this.clusterResults == null) { // create cluster logger.debug("Create clustering."); GMeans<I> gMeansCluster = new GMeans<>(this.getInput(), this.distanceMeassure, this.maxIterationsInInnerLoop, this.seed); this.clusterResults = gMeansCluster.cluster(); // this is not interruptible!! } return this.activate(); case ACTIVE: return this.doAlgorithmStep(); default: throw new IllegalStateException("Unknown algorithm state " + this.getState()); } } }
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/KmeansSampling.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory;

import org.apache.commons.math3.ml.clustering.Clusterable;
import org.apache.commons.math3.ml.clustering.KMeansPlusPlusClusterer;
import org.apache.commons.math3.ml.distance.DistanceMeasure;
import org.apache.commons.math3.random.JDKRandomGenerator;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance;
import org.api4.java.ai.ml.core.exception.DatasetCreationException;
import org.api4.java.algorithm.events.IAlgorithmEvent;
import org.api4.java.algorithm.exceptions.AlgorithmException;
import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException;
import org.api4.java.algorithm.exceptions.AlgorithmTimeoutedException;
import org.slf4j.Logger;

/**
 * Implementation of a sampling method using kmeans-clustering. This algorithm
 * produces clusters of the given points and checks weather all points in a
 * cluster have the same target Attribute. If yes only the point nearest to the
 * center is added, otherwise the whole cluster is added to the sample.
 * <p>
 * Caution: This does ignore the given sample size!
 *
 * @author jnowack
 */
public class KmeansSampling<I extends ILabeledInstance & Clusterable, D extends ILabeledDataset<I>> extends ClusterSampling<I, D> {

	/* number of clusters, if -1 use sample size */
	private final int k;
	private final int maxIterations;

	/**
	 * Implementation of a sampling method using kmeans-clustering.
	 *
	 * @param seed
	 *            Random Seed
	 * @param k
	 *            number of clusters
	 * @param maxIterations
	 *            maximum number of k-means iterations
	 * @throws IllegalArgumentException if the dataset has more than 1000 instances
	 */
	public KmeansSampling(final long seed, final int k, final int maxIterations, final D input) {
		super(seed, input);
		this.k = k;
		this.maxIterations = maxIterations;
		if (input.size() > 1000) {
			throw new IllegalArgumentException("KMeansSampling does not support datasets with more than 1000 points, because it has quadratic (non-interruptible) runtime.");
		}
	}

	/**
	 * Implementation of a sampling method using kmeans-clustering. The sample size
	 * will be used as the number of clusters.
	 *
	 * @param seed
	 *            Random Seed
	 * @param dist
	 *            {@link DistanceMeasure} to be used
	 * @throws IllegalArgumentException if the dataset has more than 1000 instances
	 */
	public KmeansSampling(final int maxIterations, final long seed, final DistanceMeasure dist, final D input) {
		super(seed, dist, input);
		this.maxIterations = maxIterations;
		this.k = -1;
		if (input.size() > 1000) {
			throw new IllegalArgumentException("KMeansSampling does not support datasets with more than 1000 points, because it has quadratic (non-interruptible) runtime.");
		}
	}

	/**
	 * Implementation of a sampling method using kmeans-clustering.
	 *
	 * @param seed
	 *            Random Seed
	 * @param k
	 *            number of clusters
	 * @param dist
	 *            {@link DistanceMeasure} to be used
	 */
	// NOTE(review): unlike the other two constructors, this one performs no
	// input.size() > 1000 guard — looks like an oversight; confirm whether
	// large inputs are intentionally allowed when k and dist are given explicitly.
	public KmeansSampling(final int maxIterations, final long seed, final int k, final DistanceMeasure dist, final D input) {
		super(seed, dist, input);
		this.maxIterations = maxIterations;
		this.k = k;
	}

	@SuppressWarnings("unchecked")
	@Override
	public IAlgorithmEvent nextWithException() throws AlgorithmException, InterruptedException, AlgorithmTimeoutedException, AlgorithmExecutionCanceledException {
		Logger logger = this.getLogger();
		switch (this.getState()) {
		case CREATED:
			logger.info("Initializing KMeansSampling.");
			// Initialize variables
			try {
				this.sample = (D) this.getInput().createEmptyCopy();
			} catch (DatasetCreationException e) {
				throw new AlgorithmException("Could not create a copy of the dataset.", e);
			}
			// create cluster
			JDKRandomGenerator r = new JDKRandomGenerator();
			r.setSeed(this.seed);
			// update k if k=-1
			int numClusters = this.k > 0 ? this.k : this.sampleSize;
			// only cluster if no precomputed result was injected via setClusterResults
			if (this.clusterResults == null) {
				KMeansPlusPlusClusterer<I> kMeansCluster = new KMeansPlusPlusClusterer<>(numClusters, this.maxIterations, this.distanceMeassure, r);
				logger.debug("Starting to cluster the dataset with k={} on {}x{} dataset.", numClusters, this.getInput().size(), this.getInput().getNumAttributes());
				this.clusterResults = kMeansCluster.cluster(this.getInput()); // this is not interruptible!!
				logger.debug("Clustering ready.");
			}
			logger.info("KMeansSampling activated.");
			return this.activate();
		case ACTIVE:
			return this.doAlgorithmStep();
		default:
			throw new IllegalStateException("Unknown algorithm state " + this.getState());
		}
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/SampleComplementComputer.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory;

import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;

import org.api4.java.ai.ml.core.dataset.IDataset;
import org.api4.java.ai.ml.core.dataset.IInstance;
import org.api4.java.ai.ml.core.exception.DatasetCreationException;

/**
 * Computes the multiset complement of a sample with respect to the dataset it
 * was drawn from: every occurrence of an instance that is not accounted for by
 * the sample ends up in the complement. Instances are identified by their hash code.
 */
public class SampleComplementComputer {

	/**
	 * Gets the data points contained in the original data that are not part of the sample.
	 *
	 * @param originalData The full dataset the sample was drawn from.
	 * @param sample The sample whose complement is requested.
	 * @return A new dataset containing originalData minus sample (respecting multiplicities).
	 * @throws DatasetCreationException If the complement dataset cannot be created.
	 * @throws InterruptedException If the thread is interrupted during computation.
	 */
	@SuppressWarnings("unchecked")
	public <I extends IInstance, D extends IDataset<I>> D getComplement(final D originalData, final D sample) throws DatasetCreationException, InterruptedException {
		if (sample == null) {
			throw new IllegalStateException("Sample computation has not started yet.");
		}
		if (originalData.isEmpty()) {
			throw new IllegalArgumentException("Cannot compute complement of an empty base set!");
		}

		/* count occurrences per hash code (necessary, because items could occur several times) */
		Map<Object, Integer> inputFrequencies = new HashMap<>();
		Map<Object, Integer> sampleFrequencies = new HashMap<>();
		Map<Object, Integer> complementFrequencies = new HashMap<>();
		for (Object item : originalData) {
			inputFrequencies.merge(item.hashCode(), 1, Integer::sum);
			complementFrequencies.put(item.hashCode(), 0);
			sampleFrequencies.put(item.hashCode(), 0);
		}
		for (Object item : sample) {
			sampleFrequencies.merge(item.hashCode(), 1, Integer::sum);
		}

		/* build the complement: add each occurrence that is not yet covered by sample + complement */
		D complement = (D) originalData.createEmptyCopy();
		for (I item : originalData) {
			Object key = item.hashCode();
			Integer occurrencesInComplement = complementFrequencies.get(key);
			if (sampleFrequencies.get(key) + occurrencesInComplement < inputFrequencies.get(key)) {
				complement.add(item);
				complementFrequencies.put(key, occurrencesInComplement + 1);
			}
		}

		/* plausibility check: sizes and per-item frequencies must add up */
		if (sample.size() + complement.size() != originalData.size()) {
			throw new IllegalStateException("The input set of size " + originalData.size() + " has been reduced to " + sample.size() + " + " + complement.size() + ". This is not plausible.");
		}
		for (Entry<Object, Integer> entry : inputFrequencies.entrySet()) {
			Object inst = entry.getKey();
			int frequencyNow = sampleFrequencies.get(inst) + complementFrequencies.get(inst);
			if (entry.getValue() != frequencyNow) {
				throw new IllegalStateException("Frequency of instance " + inst + " was " + entry.getValue() + " but is now " + frequencyNow);
			}
		}
		return complement;
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/SimpleRandomSampling.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Random;

import org.api4.java.ai.ml.core.dataset.IDataset;
import org.api4.java.ai.ml.core.exception.DatasetCreationException;
import org.api4.java.algorithm.events.IAlgorithmEvent;
import org.api4.java.algorithm.exceptions.AlgorithmException;
import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException;
import org.api4.java.algorithm.exceptions.AlgorithmTimeoutedException;

import ai.libs.jaicore.ml.core.dataset.DatasetDeriver;

/**
 * Uniform random sampling without replacement. For large sample ratios (&gt; 30% of
 * the dataset) a full index permutation is shuffled and truncated; for small ratios
 * indices are drawn by rejection sampling.
 */
public class SimpleRandomSampling<D extends IDataset<?>> extends ASamplingAlgorithm<D> {

	private Random random;
	private Collection<Integer> chosenIndices; // indices of the input dataset selected for the sample
	private boolean isLargeSample; // decided in CREATED: whether to use the shuffle or the rejection strategy
	private int numberOfLastSample = 0; // number of indices drawn so far in the rejection strategy

	public SimpleRandomSampling(final Random random, final D input) {
		super(input);
		this.random = random;
	}

	/**
	 * Executes one step of the sampling: CREATED decides the strategy, ACTIVE draws
	 * all indices and builds the sample in a single step.
	 *
	 * @throws AlgorithmException If the sample dataset cannot be created.
	 */
	@Override
	public IAlgorithmEvent nextWithException() throws AlgorithmException, InterruptedException, AlgorithmTimeoutedException, AlgorithmExecutionCanceledException {
		int n = this.getInput().size();
		switch (this.getState()) {
		case CREATED:
			this.isLargeSample = this.sampleSize * 1.0 / n > .3; // if the sample contains more than 30%, we consider it large
			return this.activate();
		case ACTIVE:
			/* if we have a large sample, we just create a shuffled list of indices, which will be the chosen elements */
			if (this.isLargeSample) {
				this.chosenIndices = new ArrayList<>(n);
				for (int i = 0; i < n; i++) {
					if (i % 100 == 0) { // poll for timeout/cancellation only every 100 iterations
						this.checkAndConductTermination();
					}
					this.chosenIndices.add(i);
				}
				Collections.shuffle((List<Integer>) this.chosenIndices, this.random);
				this.chosenIndices = ((List<Integer>) this.chosenIndices).subList(0, this.sampleSize);
			}

			/* if we have a small sample, randomly draw unchosen elements */
			else {
				this.chosenIndices = new HashSet<>();
				while (this.numberOfLastSample < this.sampleSize) {
					int i;
					if (this.numberOfLastSample % 100 == 0) {
						this.checkAndConductTermination();
					}
					do {
						/* BUGFIX: indices must be drawn from the whole dataset range [0, n).
						 * Previously this was nextInt(this.sampleSize), which could only ever
						 * select the first sampleSize elements of the dataset, i.e. the "random"
						 * sample was deterministically the dataset prefix. */
						i = this.random.nextInt(n);
					} while (this.chosenIndices.contains(i));
					this.chosenIndices.add(i);
					this.numberOfLastSample++;
				}
			}

			/* create sample from the chosen indices */
			DatasetDeriver<D> deriver = new DatasetDeriver<>(this.getInput());
			deriver.addIndices(this.chosenIndices);
			try {
				this.sample = deriver.build();
			} catch (DatasetCreationException e) {
				throw new AlgorithmException("Could not create sample.", e);
			}
			return this.terminate();
		case INACTIVE:
			this.doInactiveStep();
			break;
		default:
			throw new IllegalStateException("Unknown algorithm state " + this.getState());
		}
		return null;
	}

	/**
	 * @return An unmodifiable view of the indices chosen for the sample.
	 * @throws IllegalStateException If the algorithm has not run yet.
	 */
	public Collection<Integer> getChosenIndices() {
		if (this.chosenIndices == null) {
			throw new IllegalStateException("The algorithm has not run, so no indices have been chosen!");
		}
		return Collections.unmodifiableCollection(this.chosenIndices);
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/SystematicSampling.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory;

import java.util.Comparator;
import java.util.Random;

import org.api4.java.ai.ml.core.dataset.IInstance;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;
import org.api4.java.ai.ml.core.exception.DatasetCreationException;
import org.api4.java.algorithm.events.IAlgorithmEvent;
import org.api4.java.algorithm.exceptions.AlgorithmException;
import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException;
import org.api4.java.algorithm.exceptions.AlgorithmTimeoutedException;

import ai.libs.jaicore.ml.core.dataset.DatasetDeriver;
import ai.libs.jaicore.ml.core.filter.sampling.SampleElementAddedEvent;

/**
 * Implementation of Systematic Sampling: Sort datapoints and pick every k-th
 * datapoint for the sample, starting at a random offset.
 *
 * @author Lukas Brandt
 */
public class SystematicSampling<D extends ILabeledDataset<?>> extends ASamplingAlgorithm<D> {

    private DatasetDeriver<D> sampleBuilder;
    private Random random;
    private D sortedDataset = null;
    /* step width between picked datapoints: |dataset| / sampleSize */
    private int k;
    /* randomly chosen offset of the first picked datapoint */
    private int startIndex;
    /* number of datapoints picked so far */
    private int index;

    // Default Comparator to sort datapoints lexicographically by their vector representation.
    private Comparator<IInstance> datapointComparator = (o1, o2) -> {
        double[] v1 = o1.getPoint();
        double[] v2 = o2.getPoint();
        for (int i = 0; i < Math.min(v1.length, v2.length); i++) {
            int c = Double.compare(v1[i], v2[i]);
            if (c != 0) {
                return c;
            }
        }
        return 0;
    };

    /**
     * Simple constructor that uses the default datapoint comparator.
     *
     * @param random Random Object for determining the sampling start point.
     */
    public SystematicSampling(final Random random, final D input) {
        super(input);
        this.random = random;
    }

    /**
     * Constructor for a custom datapoint comparator.
     *
     * @param random Random Object for determining the sampling start point.
     * @param datapointComparator Comparator to sort the dataset.
     */
    public SystematicSampling(final Random random, final Comparator<IInstance> datapointComparator, final D input) {
        this(random, input);
        this.datapointComparator = datapointComparator;
    }

    @SuppressWarnings("unchecked")
    @Override
    public IAlgorithmEvent nextWithException() throws AlgorithmException, InterruptedException, AlgorithmTimeoutedException, AlgorithmExecutionCanceledException {
        switch (this.getState()) {
        case CREATED:
            // Initialize variables and sort dataset if no pre-sorted one was injected.
            try {
                if (this.sortedDataset == null) {
                    this.sortedDataset = (D) this.getInput().createCopy();
                    this.sortedDataset.sort(this.datapointComparator);
                }
            } catch (DatasetCreationException e) {
                throw new AlgorithmException("Could not create a copy of the dataset.", e);
            }
            /* BUGFIX: the deriver must be created in every case; previously it was only
             * created inside the null-guard, so an externally injected sorted dataset
             * (via setSortedDataset) led to an NPE in the ACTIVE state. */
            this.sampleBuilder = new DatasetDeriver<>(this.sortedDataset);
            this.startIndex = this.random.nextInt(this.sortedDataset.size());
            this.k = this.sortedDataset.size() / this.sampleSize;
            this.index = 0;
            return this.activate();
        case ACTIVE:
            // If the sample size is not reached yet, add the next datapoint from the
            // systematic sampling method.
            if (this.sampleBuilder.currentSizeOfTarget() < this.sampleSize) {
                if (this.index % 100 == 0) {
                    this.checkAndConductTermination();
                }
                int e = (this.startIndex + (this.index++) * this.k) % this.sortedDataset.size();
                this.sampleBuilder.add(e);
                return new SampleElementAddedEvent(this);
            } else {
                try {
                    this.sample = this.sampleBuilder.build();
                } catch (DatasetCreationException e) {
                    throw new AlgorithmException("Could not build the sample.", e);
                }
                return this.terminate();
            }
        case INACTIVE:
            this.doInactiveStep();
            break;
        default:
            throw new IllegalStateException("Unknown algorithm state " + this.getState());
        }
        return null;
    }

    public D getSortedDataset() {
        return this.sortedDataset;
    }

    public void setSortedDataset(final D sortedDataset) {
        this.sortedDataset = sortedDataset;
    }
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/WaitForSamplingStepEvent.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory; import org.api4.java.algorithm.IAlgorithm; import ai.libs.jaicore.basic.algorithm.AAlgorithmEvent; public class WaitForSamplingStepEvent extends AAlgorithmEvent { public WaitForSamplingStepEvent(final IAlgorithm<?, ?> algorithm) { super(algorithm); } }
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/casecontrol/APilotEstimateSampling.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory.casecontrol;

import java.util.List;
import java.util.Objects;
import java.util.Random;

import org.api4.java.ai.ml.classification.IClassifier;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance;
import org.api4.java.ai.ml.core.filter.unsupervised.sampling.ISamplingAlgorithm;
import org.api4.java.algorithm.exceptions.AlgorithmException;
import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException;
import org.api4.java.algorithm.exceptions.AlgorithmTimeoutedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ai.libs.jaicore.basic.sets.Pair;
import ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories.interfaces.ISamplingAlgorithmFactory;

/**
 * Base class for case-control samplers that derive their acceptance thresholds
 * from a "pilot" classifier. The pilot is trained either on the full input
 * dataset or on a sub-sample of size {@code preSampleSize}; subclasses then
 * compute per-instance thresholds from the trained pilot.
 */
public abstract class APilotEstimateSampling<D extends ILabeledDataset<? extends ILabeledInstance>> extends CaseControlLikeSampling<D> {

    private Logger logger = LoggerFactory.getLogger(APilotEstimateSampling.class);

    /* sub-sampler used to reduce the pilot's training data; null means "train on everything" */
    private final ISamplingAlgorithm<D> subSampler;
    protected int preSampleSize;
    private final IClassifier pilotEstimator;

    /** Convenience constructor: pilot is trained on the full dataset (no sub-sampling). */
    protected APilotEstimateSampling(final D input, final IClassifier pilotClassifier) {
        this(input, null, 1, pilotClassifier);
    }

    /**
     * @param input The dataset to sample from.
     * @param subSamplingFactory Factory for the sub-sampler used to train the pilot; may be null.
     * @param preSampleSize Size of the pilot's training sub-sample.
     * @param pilotClassifier The pilot classifier; must not be null.
     */
    protected APilotEstimateSampling(final D input, final ISamplingAlgorithmFactory<D, ?> subSamplingFactory, final int preSampleSize, final IClassifier pilotClassifier) {
        super(input);
        Objects.requireNonNull(pilotClassifier);
        this.pilotEstimator = pilotClassifier;
        this.preSampleSize = preSampleSize;
        if (subSamplingFactory != null) {
            // NOTE(review): fixed seed 0 makes the pilot's sub-sample deterministic across runs — confirm this is intended.
            this.subSampler = subSamplingFactory.getAlgorithm(preSampleSize, input, new Random(0));
        } else {
            this.subSampler = null;
        }
    }

    /**
     * Trains the pilot (on a sub-sample if a sub-sampler was configured, otherwise on
     * the full input) and delegates threshold computation to the subclass.
     */
    @Override
    public List<Pair<ILabeledInstance, Double>> computeAcceptanceThresholds() throws InterruptedException, AlgorithmTimeoutedException, AlgorithmExecutionCanceledException, AlgorithmException {
        /* if the pilot estimator should be trained only on a subset, build this subset now. Otherwise train it on the whole dataset. */
        if (this.subSampler != null) {
            D subSample = this.subSampler.call();
            this.logger.info("Fitting pilot with reduced dataset of {}/{} instances.", subSample.size(), this.getInput().size());
            this.pilotEstimator.fit(subSample);
        } else {
            this.logger.info("Fitting pilot with full dataset.");
            this.pilotEstimator.fit(this.getInput());
        }
        return this.calculateAcceptanceThresholdsWithTrainedPilot(this.getInput(), this.pilotEstimator);
    }

    /**
     * Computes one acceptance threshold per instance, given an already trained pilot.
     *
     * @param instances The dataset for which to compute thresholds.
     * @param pilotEstimator The trained pilot classifier.
     * @return Pairs of instance and acceptance threshold.
     */
    public abstract List<Pair<ILabeledInstance, Double>> calculateAcceptanceThresholdsWithTrainedPilot(D instances, IClassifier pilotEstimator) throws InterruptedException, AlgorithmTimeoutedException, AlgorithmExecutionCanceledException, AlgorithmException;

    public IClassifier getPilotEstimator() {
        return this.pilotEstimator;
    }

    @Override
    public void setLoggerName(final String loggerName) {
        this.logger = LoggerFactory.getLogger(loggerName);
        super.setLoggerName(loggerName + ".ccsampling");
    }

    @Override
    public String getLoggerName() {
        return this.logger.getName();
    }
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/casecontrol/CaseControlLikeSampling.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory.casecontrol;

import java.util.HashMap;
import java.util.List;
import java.util.Random;

import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance;
import org.api4.java.ai.ml.core.exception.DatasetCreationException;
import org.api4.java.algorithm.events.IAlgorithmEvent;
import org.api4.java.algorithm.exceptions.AlgorithmException;
import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException;
import org.api4.java.algorithm.exceptions.AlgorithmTimeoutedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ai.libs.jaicore.basic.sets.Pair;
import ai.libs.jaicore.ml.core.dataset.DatasetDeriver;
import ai.libs.jaicore.ml.core.filter.sampling.SampleElementAddedEvent;
import ai.libs.jaicore.ml.core.filter.sampling.inmemory.ASamplingAlgorithm;

/**
 * Base class for case-control sampling: each instance has an acceptance
 * threshold, and instances are accepted into the sample when a uniform random
 * draw falls below their threshold.
 */
public abstract class CaseControlLikeSampling<D extends ILabeledDataset<? extends ILabeledInstance>> extends ASamplingAlgorithm<D> {

    private Logger logger = LoggerFactory.getLogger(CaseControlLikeSampling.class);

    protected Random rand;
    /* per-instance acceptance thresholds; computed lazily in the CREATED state */
    protected List<Pair<ILabeledInstance, Double>> acceptanceThresholds = null;
    private final DatasetDeriver<D> deriver;
    /* index of the next instance to consider for acceptance */
    private int currentlyConsideredIndex = 0;

    protected CaseControlLikeSampling(final D input) {
        super(input);
        this.deriver = new DatasetDeriver<>(input);
    }

    public List<Pair<ILabeledInstance, Double>> getAcceptanceThresholds() {
        return this.acceptanceThresholds;
    }

    public void setAcceptanceTresholds(final List<Pair<ILabeledInstance, Double>> thresholds) {
        this.acceptanceThresholds = thresholds;
    }

    /**
     * Computes the per-instance acceptance thresholds used to decide which
     * instances enter the sample.
     */
    public abstract List<Pair<ILabeledInstance, Double>> computeAcceptanceThresholds()
            throws ThresholdComputationFailedException, InterruptedException, AlgorithmTimeoutedException, AlgorithmExecutionCanceledException, AlgorithmException;

    @Override
    public final IAlgorithmEvent nextWithException() throws AlgorithmException, InterruptedException, AlgorithmTimeoutedException, AlgorithmExecutionCanceledException {
        this.logger.debug("Executing next step.");
        switch (this.getState()) {
        case CREATED:
            try {
                this.acceptanceThresholds = this.computeAcceptanceThresholds();
            } catch (ThresholdComputationFailedException e1) {
                throw new AlgorithmException("Sampler initialization failed due to problems in threshold computation.", e1);
            }
            this.logger.info("Initialized sampler with {} acceptance thresholds and {}x{}-dataset", this.acceptanceThresholds.size(), this.getInput().size(), this.getInput().getNumAttributes());
            return this.activate();
        case ACTIVE:
            /* draw next sample element */
            int i = 0;
            while (this.deriver.currentSizeOfTarget() < this.sampleSize && this.currentlyConsideredIndex < this.acceptanceThresholds.size()) {
                if (i++ % 100 == 0) { // periodically check for timeout/cancel/interrupt
                    this.checkAndConductTermination();
                }
                double r = this.rand.nextDouble();
                this.currentlyConsideredIndex++;
                if (this.acceptanceThresholds.get(this.currentlyConsideredIndex - 1).getY().doubleValue() >= r) {
                    this.deriver.add(this.currentlyConsideredIndex - 1);
                    return new SampleElementAddedEvent(this);
                }
            }

            /* if no more samples can or shall be drawn, create the sample */
            try {
                this.sample = this.deriver.build();
            } catch (DatasetCreationException e) {
                throw new AlgorithmException("Could not create split.", e);
            }
            this.logger.info("Sampling has finished. Shutting down sampling algorithm.");
            return this.doInactiveStep();
        default:
            throw new IllegalStateException("No actions defined for algorithm state " + this.getState());
        }
    }

    /**
     * Count occurrences of every class. Needed to determine the probability for all
     * instances of that class.
     *
     * @param dataset Dataset of the sample algorithm object
     * @return Map from class label to its number of occurrences in the dataset.
     */
    protected HashMap<Object, Integer> countClassOccurrences(final D dataset) {
        HashMap<Object, Integer> classOccurrences = new HashMap<>();
        for (ILabeledInstance instance : dataset) {
            /* BUGFIX: the first occurrence of a class was previously recorded as 0, so all
             * counts were off by one; Map.merge also avoids the former O(k) keySet scan. */
            classOccurrences.merge(instance.getLabel(), 1, Integer::sum);
        }
        return classOccurrences;
    }

    @Override
    public void setLoggerName(final String loggerName) {
        this.logger = LoggerFactory.getLogger(loggerName);
        super.setLoggerName(loggerName + ".asampling");
    }

    @Override
    public String getLoggerName() {
        return this.logger.getName();
    }
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/casecontrol/ClassifierWeightedSampling.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory.casecontrol;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Random;
import java.util.Set;
import java.util.stream.IntStream;

import org.apache.commons.math3.distribution.EnumeratedIntegerDistribution;
import org.api4.java.ai.ml.classification.IClassifier;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance;
import org.api4.java.ai.ml.core.evaluation.IPrediction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ai.libs.jaicore.basic.sets.Pair;

/**
 * The idea behind this Sampling method is to weight instances depended on the
 * way a pilot estimator p classified them. Instances that p classified right
 * but was unsure contain the most information and are most likely to be chosen.
 * Instances that p is very sure about and Instances that p is quite sure about
 * their actual class and classified them falsely, are medium likely to be
 * chosen. Instances that p is very unsure about their actual class and
 * classified them falsely are not likely to be chosen. Note that any instance
 * still has a base probability to be chosen.
 *
 * @author noni4
 */
public class ClassifierWeightedSampling<D extends ILabeledDataset<? extends ILabeledInstance>> extends APilotEstimateSampling<D> {

    private Logger logger = LoggerFactory.getLogger(ClassifierWeightedSampling.class);

    public ClassifierWeightedSampling(final IClassifier pilotEstimator, final Random rand, final D dataset) {
        super(dataset, pilotEstimator);
        this.rand = rand;
    }

    /**
     * Computes the mean probability the pilot assigns to the true label over all
     * instances; prediction failures contribute 0 and are logged.
     */
    private double getMean(final ILabeledDataset<?> instances) throws InterruptedException {
        double sum = 0.0;
        for (ILabeledInstance instance : instances) {
            try {
                sum += this.getPilotEstimator().predict(instance).getProbabilityOfLabel(instance.getLabel());
            } catch (InterruptedException e) {
                throw e;
            } catch (Exception e) {
                this.logger.error("Unexpected error in pilot estimator", e);
            }
        }
        return sum / instances.size();
    }

    @Override
    public List<Pair<ILabeledInstance, Double>> calculateAcceptanceThresholdsWithTrainedPilot(final D dataset, final IClassifier pilot) throws InterruptedException {
        /* compute mean value and base values the instances must have */
        double mid = this.getMean(dataset);
        double baseValue = 10 * mid + 1; // arbitrary value, there most likely be better one
        double addForRightClassification = baseValue + 2 * mid; // like baseValue

        /* determine probability for each index to be chosen */
        double[] weights = new double[dataset.size()];
        for (int i = 0; i < weights.length; i++) {
            try {
                IPrediction prediction = pilot.predict(dataset.get(i));
                /* BUGFIX: labels were previously compared with reference equality (==);
                 * labels are arbitrary Objects, so value equality is required. */
                if (Objects.equals(prediction.getLabelWithHighestProbability(), dataset.get(i).getLabel())) {
                    weights[i] = addForRightClassification - prediction.getProbabilityOfLabel(dataset.get(i).getLabel());
                } else {
                    weights[i] = baseValue + prediction.getProbabilityOfLabel(prediction.getLabelWithHighestProbability());
                }
            } catch (InterruptedException e) {
                /* BUGFIX: do not swallow interrupts in the generic catch below */
                throw e;
            } catch (Exception e) {
                weights[i] = 0;
            }
        }
        int[] indices = IntStream.range(0, this.getInput().size()).toArray();
        EnumeratedIntegerDistribution finalDistribution = new EnumeratedIntegerDistribution(indices, weights);
        finalDistribution.reseedRandomGenerator(this.rand.nextLong());

        /* now draw <number of samples> many indices whose threshold will be set to 1 */
        int n = this.getSampleSize();
        Set<Integer> consideredIndices = new HashSet<>();
        // NOTE(review): rejection sampling; if n approaches the dataset size (or many
        // weights are 0), this loop may take very long — confirm n << |dataset| holds.
        for (int i = 0; i < n; i++) {
            int index;
            do {
                index = finalDistribution.sample();
            } while (consideredIndices.contains(index));
            consideredIndices.add(index);
        }

        /* now create the list of pairs: threshold 1 for chosen indices, 0 otherwise */
        List<Pair<ILabeledInstance, Double>> thresholds = new ArrayList<>();
        int m = dataset.size();
        for (int i = 0; i < m; i++) {
            ILabeledInstance inst = dataset.get(i);
            double threshold = consideredIndices.contains(i) ? 1 : 0;
            thresholds.add(new Pair<>(inst, threshold));
        }
        return thresholds;
    }
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/casecontrol/LocalCaseControlSampling.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory.casecontrol;

import java.util.ArrayList;
import java.util.List;
import java.util.Random;

import org.api4.java.ai.ml.classification.IClassifier;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance;
import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException;
import org.api4.java.algorithm.exceptions.AlgorithmTimeoutedException;

import ai.libs.jaicore.basic.sets.Pair;

/**
 * Local case-control sampling: each instance's acceptance boundary is its
 * pilot-estimator loss normalized by the total loss over the dataset, so the
 * boundaries form a cumulative distribution over the instances.
 */
public class LocalCaseControlSampling extends APilotEstimateSampling<ILabeledDataset<?>> {

    public LocalCaseControlSampling(final Random rand, final int preSampleSize, final ILabeledDataset<?> input, final IClassifier pilot) {
        super(input, pilot);
        this.rand = rand;
        this.preSampleSize = preSampleSize;
    }

    @Override
    public List<Pair<ILabeledInstance, Double>> calculateAcceptanceThresholdsWithTrainedPilot(final ILabeledDataset<?> instances, final IClassifier pilotEstimator)
            throws AlgorithmTimeoutedException, InterruptedException, AlgorithmExecutionCanceledException {

        /* first pass: accumulate the total loss (prediction failures count as loss 1) */
        double sumOfDistributionLosses = 0;
        int i = 0;
        for (ILabeledInstance instance : instances) {
            if (i++ % 100 == 0) { // periodically check for timeout/cancel/interrupt
                this.checkAndConductTermination();
            }
            double loss;
            try {
                loss = 1 - pilotEstimator.predict(instance).getProbabilityOfLabel(instance.getLabel());
            } catch (InterruptedException e) {
                throw e;
            } catch (Exception e) {
                loss = 1;
            }
            sumOfDistributionLosses += loss;
        }

        /* second pass: cumulative normalized losses are the probability boundaries.
         * FIX: the former third loop that copied the result pair-by-pair into an
         * identical list has been removed, and the loop counter is no longer reused
         * across passes. */
        double boundaryOfCurrentInstance = 0.0;
        List<Pair<ILabeledInstance, Double>> probabilityBoundaries = new ArrayList<>(instances.size());
        int j = 0;
        for (ILabeledInstance instance : instances) {
            if (j++ % 100 == 0) {
                this.checkAndConductTermination();
            }
            double loss;
            try {
                loss = 1 - pilotEstimator.predict(instance).getProbabilityOfLabel(instance.getLabel());
            } catch (InterruptedException e) {
                throw e;
            } catch (Exception e) {
                loss = 1;
            }
            boundaryOfCurrentInstance += loss / sumOfDistributionLosses;
            probabilityBoundaries.add(new Pair<>(instance, boundaryOfCurrentInstance));
        }
        return probabilityBoundaries;
    }
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/casecontrol/OSMAC.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory.casecontrol;

import java.util.ArrayList;
import java.util.List;
import java.util.Random;

import org.api4.java.ai.ml.classification.IClassifier;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance;
import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException;
import org.api4.java.algorithm.exceptions.AlgorithmTimeoutedException;

import ai.libs.jaicore.basic.sets.Pair;
import ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories.interfaces.ISamplingAlgorithmFactory;

/**
 * OSMAC-style case-control sampling: instances are weighted by their pilot loss
 * scaled by a per-instance factor derived from the attribute values, and the
 * normalized cumulative weights form the acceptance boundaries.
 */
public class OSMAC<D extends ILabeledDataset<? extends ILabeledInstance>> extends APilotEstimateSampling<D> {

    /** Pilot is trained on the full dataset. */
    public OSMAC(final Random rand, final D input, final IClassifier pilot) {
        super(input, pilot);
        this.rand = rand;
    }

    /** Pilot is trained on a sub-sample of size {@code preSampleSize}. */
    public OSMAC(final Random rand, final D input, final ISamplingAlgorithmFactory<D, ?> subSamplingFactory, final int preSampleSize, final IClassifier pilot) {
        super(input, subSamplingFactory, preSampleSize, pilot);
        this.rand = rand;
    }

    @Override
    public List<Pair<ILabeledInstance, Double>> calculateAcceptanceThresholdsWithTrainedPilot(final D instances, final IClassifier pilotEstimator) throws AlgorithmTimeoutedException, InterruptedException, AlgorithmExecutionCanceledException {
        double boundaryOfCurrentInstance = 0.0;
        ArrayList<Pair<ILabeledInstance, Double>> probabilityBoundaries = new ArrayList<>();
        double sumOfDistributionLosses = 0;
        // NOTE(review): despite the name, this is the SUM of numeric attribute values,
        // not a vector length/norm — confirm intended semantics.
        int vectorLength;
        int n = instances.size();
        double[] normalizedLosses = new double[n];

        /* first pass: per-instance weight = pilot loss * attribute-value sum */
        for (int i = 0; i < n; i++) {
            if (i % 100 == 0) { // periodically check for timeout/cancel/interrupt
                this.checkAndConductTermination();
            }
            ILabeledInstance instance = instances.get(i);
            vectorLength = 0;
            for (Object attributeVal : instance.getAttributes()) {
                if (!attributeVal.equals("?")) { // just ignore missing values
                    if (!(attributeVal instanceof Number)) {
                        throw new IllegalArgumentException("Illegal non-double attribute value " + attributeVal);
                    }
                    // NOTE(review): compound assignment narrows each double to int here,
                    // truncating fractional attribute values — possibly unintended.
                    vectorLength += Double.valueOf(attributeVal.toString());
                }
            }
            double loss;
            try {
                loss = 1 - pilotEstimator.predict(instance).getProbabilityOfLabel(instance.getLabel());
            } catch (Exception e) {
                // NOTE(review): this also swallows InterruptedException from predict() — verify.
                loss = 1;
            }
            normalizedLosses[i] = loss * vectorLength;
            sumOfDistributionLosses += normalizedLosses[i];
        }

        /* second pass: cumulative normalized weights become the probability boundaries */
        for (int i = 0; i < n; i++) {
            if (i % 100 == 0) {
                this.checkAndConductTermination();
            }
            boundaryOfCurrentInstance += normalizedLosses[i] / sumOfDistributionLosses;
            probabilityBoundaries.add(new Pair<>(instances.get(i), boundaryOfCurrentInstance));
        }
        return probabilityBoundaries;
    }
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/casecontrol/ThresholdComputationFailedException.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory.casecontrol; public class ThresholdComputationFailedException extends Exception { public ThresholdComputationFailedException(final String message) { super(message); } public ThresholdComputationFailedException(final String message, final Exception e) { super(message, e); } public ThresholdComputationFailedException(final Exception e) { super(e); } }
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/factories/ASampleAlgorithmFactory.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories;

import java.lang.reflect.InvocationTargetException;
import java.util.Arrays;
import java.util.Objects;
import java.util.Random;

import org.api4.java.ai.ml.core.dataset.IDataset;
import org.api4.java.common.reconstruction.IReconstructible;
import org.api4.java.common.reconstruction.IReconstructionInstruction;
import org.api4.java.common.reconstruction.IReconstructionPlan;

import ai.libs.jaicore.basic.reconstruction.ReconstructionInstruction;
import ai.libs.jaicore.basic.reconstruction.ReconstructionPlan;
import ai.libs.jaicore.ml.core.filter.sampling.inmemory.ASamplingAlgorithm;
import ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories.interfaces.ISamplingAlgorithmFactory;

/**
 * Base class for sampling-algorithm factories that carries the sample size and
 * random source, and supports reconstruction via a static {@link #create} call.
 */
public abstract class ASampleAlgorithmFactory<D extends IDataset<?>, A extends ASamplingAlgorithm<D>> implements ISamplingAlgorithmFactory<D, A>, IReconstructible {

    private int sampleSize;
    /* seed used for reconstruction; -1 marks "unknown" after setRandom(Random) */
    private long seed;
    private Random random;

    /**
     * Reflectively instantiates a factory of the given class and configures seed and
     * sample size. Used by the reconstruction mechanism.
     */
    public static <D extends IDataset<?>, A extends ASamplingAlgorithm<D>, T extends ASampleAlgorithmFactory<D, A>> T create(final Class<T> factoryClazz, final int sampleSize, final long seed)
            throws InstantiationException, IllegalAccessException, InvocationTargetException, NoSuchMethodException {
        T factory = factoryClazz.getConstructor().newInstance();
        factory.setRandom(new Random(seed));
        factory.setSampleSize(sampleSize);
        return factory;
    }

    protected ReconstructionInstruction getConstructionInstruction() {
        return new ReconstructionInstruction(ASampleAlgorithmFactory.class.getName(), "create", new Class<?>[] { Class.class, int.class, long.class }, new Object[] { this.getClass(), this.sampleSize, this.seed });
    }

    @Override
    public IReconstructionPlan getConstructionPlan() {
        return new ReconstructionPlan(Arrays.asList(this.getConstructionInstruction()));
    }

    @Override
    public void addInstruction(final IReconstructionInstruction instruction) {
        throw new UnsupportedOperationException("No instructions can be added to a sampling factory.");
    }

    /**
     * Creates the algorithm using the configured sample size and random source.
     *
     * @throws IllegalStateException if the sample size is non-positive or exceeds the dataset size.
     */
    @Override
    public A getAlgorithm(final D inputDataset) {
        Objects.requireNonNull(this.random);
        /* BUGFIX: previously only sampleSize == 0 was rejected, so negative sizes
         * slipped through this validity check. */
        if (this.sampleSize <= 0 || this.sampleSize > inputDataset.size()) {
            throw new IllegalStateException("Illegal sample size " + this.sampleSize + " for dataset with " + inputDataset.size() + " points.");
        }
        return this.getAlgorithm(this.sampleSize, inputDataset, this.random);
    }

    public int getSampleSize() {
        return this.sampleSize;
    }

    public void setSampleSize(final int sampleSize) {
        this.sampleSize = sampleSize;
    }

    public Random getRandom() {
        return this.random;
    }

    /**
     * Sets the random source directly. Note that the seed is then unknown and is
     * recorded as -1 in the reconstruction instruction.
     */
    public void setRandom(final Random random) {
        this.random = random;
        this.seed = -1;
    }

    public long getSeed() {
        return this.seed;
    }

    public void setSeed(final long seed) {
        this.seed = seed;
        this.random = new Random(seed);
    }
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/factories/GmeansSamplingFactory.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories;

import java.util.Random;

import org.apache.commons.math3.ml.distance.DistanceMeasure;
import org.apache.commons.math3.ml.distance.ManhattanDistance;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;

import ai.libs.jaicore.ml.core.filter.sampling.IClusterableInstance;
import ai.libs.jaicore.ml.core.filter.sampling.inmemory.GmeansSampling;
import ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories.interfaces.IRerunnableSamplingAlgorithmFactory;

/**
 * Factory for {@link GmeansSampling} algorithms. Cluster results of a previous
 * run can be reused to avoid re-clustering the same dataset.
 */
public class GmeansSamplingFactory<I extends IClusterableInstance, D extends ILabeledDataset<I>> extends ASampleAlgorithmFactory<D, GmeansSampling<I, D>>
        implements IRerunnableSamplingAlgorithmFactory<D, GmeansSampling<I, D>> {

    private GmeansSampling<I, D> previousRun;
    private int maxIterationsInnerLoop = 100;
    private long clusterSeed = System.currentTimeMillis();
    private DistanceMeasure distanceMeassure = new ManhattanDistance();

    @Override
    public void setPreviousRun(final GmeansSampling<I, D> previousRun) {
        this.previousRun = previousRun;
    }

    /**
     * Set the seed the clustering will use for initialization. Default is without a
     * fix seed and the system time instead.
     *
     * @param clusterSeed
     */
    public void setClusterSeed(final long clusterSeed) {
        this.clusterSeed = clusterSeed;
    }

    /**
     * Set the distance measure for the clustering. Default is the Manhattan distance.
     *
     * @param distanceMeassure
     */
    public void setDistanceMeassure(final DistanceMeasure distanceMeassure) {
        this.distanceMeassure = distanceMeassure;
    }

    public int getMaxIterationsInnerLoop() {
        return this.maxIterationsInnerLoop;
    }

    public void setMaxIterationsInnerLoop(final int maxIterationsInnerLoop) {
        this.maxIterationsInnerLoop = maxIterationsInnerLoop;
    }

    @Override
    public GmeansSampling<I, D> getAlgorithm(final int sampleSize, final D inputDataset, final Random random) {
        /* configure a fresh sampler instance */
        final GmeansSampling<I, D> algorithm = new GmeansSampling<>(this.maxIterationsInnerLoop, this.clusterSeed, inputDataset);
        algorithm.setSampleSize(sampleSize);
        algorithm.setDistanceMeassure(this.distanceMeassure);

        /* reuse cluster results from an earlier run when available */
        final GmeansSampling<I, D> cached = this.previousRun;
        if (cached != null && cached.getClusterResults() != null) {
            algorithm.setClusterResults(cached.getClusterResults());
        }
        return algorithm;
    }
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/factories/KmeansSamplingFactory.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories;

import java.util.Random;

import org.apache.commons.math3.ml.clustering.Clusterable;
import org.apache.commons.math3.ml.distance.DistanceMeasure;
import org.apache.commons.math3.ml.distance.ManhattanDistance;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance;

import ai.libs.jaicore.ml.core.filter.sampling.inmemory.KmeansSampling;
import ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories.interfaces.IRerunnableSamplingAlgorithmFactory;

/**
 * Factory for {@link KmeansSampling} algorithms. Cluster results of a previous
 * run can be reused to avoid re-clustering the same dataset.
 */
public class KmeansSamplingFactory<I extends ILabeledInstance & Clusterable, D extends ILabeledDataset<I>> extends ASampleAlgorithmFactory<D, KmeansSampling<I, D>>
        implements IRerunnableSamplingAlgorithmFactory<D, KmeansSampling<I, D>> {

    private KmeansSampling<I, D> previousRun;
    private int k = -1;
    private long clusterSeed = System.currentTimeMillis();
    private DistanceMeasure distanceMeassure = new ManhattanDistance();
    private int maxIterations = -1;

    @Override
    public void setPreviousRun(final KmeansSampling<I, D> previousRun) {
        this.previousRun = previousRun;
    }

    /**
     * Set how many clusters shall be created. Default is the sample size;
     *
     * @param k Parameter k of k-means.
     */
    public void setK(final int k) {
        this.k = k;
    }

    /**
     * Set the seed the clustering will use for initialization. Default is without a
     * fix seed and the system time instead.
     *
     * @param clusterSeed
     */
    public void setClusterSeed(final long clusterSeed) {
        this.clusterSeed = clusterSeed;
    }

    /**
     * Set the distance measure for the clustering. Default is the Manhattan
     * distance.
     *
     * @param distanceMeassure
     */
    public void setDistanceMeassure(final DistanceMeasure distanceMeassure) {
        this.distanceMeassure = distanceMeassure;
    }

    public void setMaxIterations(final int maxIterations) {
        this.maxIterations = maxIterations;
    }

    @Override
    public KmeansSampling<I, D> getAlgorithm(final int sampleSize, final D inputDataset, final Random random) {
        /* k defaults to the sample size unless explicitly configured */
        final int numClusters = this.k > 0 ? this.k : sampleSize;

        final KmeansSampling<I, D> algorithm = new KmeansSampling<>(this.clusterSeed, numClusters, this.maxIterations, inputDataset);
        algorithm.setSampleSize(sampleSize);
        algorithm.setDistanceMeassure(this.distanceMeassure);

        /* reuse cluster results from an earlier run when available */
        final KmeansSampling<I, D> cached = this.previousRun;
        if (cached != null && cached.getClusterResults() != null) {
            algorithm.setClusterResults(cached.getClusterResults());
        }
        return algorithm;
    }
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/factories/LabelBasedStratifiedSamplingFactory.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories; import java.util.Random; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset; import ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories.interfaces.IRerunnableSamplingAlgorithmFactory; import ai.libs.jaicore.ml.core.filter.sampling.inmemory.stratified.sampling.LabelBasedStratifiedSampling; import ai.libs.jaicore.ml.core.filter.sampling.inmemory.stratified.sampling.StratifiedSampling; public class LabelBasedStratifiedSamplingFactory<D extends ILabeledDataset<?>> extends ASampleAlgorithmFactory<D, StratifiedSampling<D>> implements IRerunnableSamplingAlgorithmFactory<D, StratifiedSampling<D>> { @Override public StratifiedSampling<D> getAlgorithm(final int sampleSize, final D inputDataset, final Random random) { LabelBasedStratifiedSampling<D> sampling = new LabelBasedStratifiedSampling<>(random, inputDataset); sampling.setSampleSize(sampleSize); return sampling; } @Override public void setPreviousRun(final StratifiedSampling<D> previousRun) { /* ignore this */ } }
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/factories/LocalCaseControlSamplingFactory.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories;

import java.util.Random;

import org.api4.java.ai.ml.classification.IClassifier;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;

import ai.libs.jaicore.ml.core.filter.sampling.inmemory.casecontrol.LocalCaseControlSampling;
import ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories.interfaces.IRerunnableSamplingAlgorithmFactory;

/**
 * Factory for {@link LocalCaseControlSampling}. A pilot classifier and the size of the
 * sample used to train it must be configured before an algorithm can be created.
 */
public class LocalCaseControlSamplingFactory extends ASampleAlgorithmFactory<ILabeledDataset<?>, LocalCaseControlSampling> implements IRerunnableSamplingAlgorithmFactory<ILabeledDataset<?>, LocalCaseControlSampling> {

	private LocalCaseControlSampling previousRun = null;
	private int preSampleSize = -1; // -1 means "not configured yet"; getAlgorithm() rejects this
	private IClassifier pilot;

	@Override
	public void setPreviousRun(final LocalCaseControlSampling previousRun) {
		this.previousRun = previousRun;
	}

	/**
	 * Sets the size of the sample the pilot estimator will be trained with.
	 *
	 * Note: there is no default value. This setter MUST be called before
	 * {@link #getAlgorithm(int, ILabeledDataset, Random)}, which otherwise throws an
	 * {@link IllegalStateException}. (The previous claim of a "half the dataset" default
	 * did not match the implementation.)
	 *
	 * @param preSampleSize size of the sample used to train the pilot estimator
	 */
	public void setPreSampleSize(final int preSampleSize) {
		this.preSampleSize = preSampleSize;
	}

	/**
	 * @return the pilot classifier used by created sampling algorithms, or {@code null} if unset
	 */
	public IClassifier getPilot() {
		return this.pilot;
	}

	/**
	 * @param pilot the pilot classifier to be used by created sampling algorithms
	 */
	public void setPilot(final IClassifier pilot) {
		this.pilot = pilot;
	}

	/**
	 * Creates a fully configured local case-control sampling algorithm.
	 *
	 * @param sampleSize desired size of the sample to be drawn
	 * @param inputDataset dataset the sample will be drawn from
	 * @param random source of randomness to make samples reproducible
	 * @return the configured sampling algorithm
	 * @throws IllegalStateException if no pilot or no pre-sample size has been configured
	 */
	@Override
	public LocalCaseControlSampling getAlgorithm(final int sampleSize, final ILabeledDataset<?> inputDataset, final Random random) {
		if (this.pilot == null) {
			throw new IllegalStateException("No pilot has been defined.");
		}
		if (this.preSampleSize == -1) {
			throw new IllegalStateException("No sample size has been defined for the pilot.");
		}
		LocalCaseControlSampling localCaseControlSampling = new LocalCaseControlSampling(random, this.preSampleSize, inputDataset, this.pilot);
		/* Re-use acceptance thresholds from a previous run (if any) to avoid recomputation. */
		if (this.previousRun != null && this.previousRun.getAcceptanceThresholds() != null) {
			localCaseControlSampling.setAcceptanceTresholds(this.previousRun.getAcceptanceThresholds());
		}
		localCaseControlSampling.setSampleSize(sampleSize);
		return localCaseControlSampling;
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/factories/OSMACSamplingFactory.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories;

import java.util.Objects;
import java.util.Random;

import org.api4.java.ai.ml.classification.IClassifier;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;

import ai.libs.jaicore.ml.core.filter.sampling.inmemory.casecontrol.OSMAC;
import ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories.interfaces.IRerunnableSamplingAlgorithmFactory;

/**
 * Factory for {@link OSMAC} sampling. A pilot classifier must be configured before
 * an algorithm can be created.
 */
public class OSMACSamplingFactory extends ASampleAlgorithmFactory<ILabeledDataset<?>, OSMAC<ILabeledDataset<?>>> implements IRerunnableSamplingAlgorithmFactory<ILabeledDataset<?>, OSMAC<ILabeledDataset<?>>> {

	private OSMAC<ILabeledDataset<?>> previousRun;
	private int preSampleSize = -1; // -1 is handed through to OSMAC unchanged
	private IClassifier pilot;

	/**
	 * Sets the size of the sample the pilot estimator will be trained with.
	 * If unset, -1 is passed to OSMAC; presumably OSMAC then defaults to half the
	 * dataset as the original comment claimed — TODO confirm against the OSMAC implementation.
	 *
	 * @param preSampleSize size of the sample used to train the pilot estimator
	 */
	public void setPreSampleSize(final int preSampleSize) {
		this.preSampleSize = preSampleSize;
	}

	/**
	 * @return the pilot classifier used by created sampling algorithms, or {@code null} if unset
	 */
	public IClassifier getPilot() {
		return this.pilot;
	}

	/**
	 * @param pilot the pilot classifier to be used by created sampling algorithms
	 */
	public void setPilot(final IClassifier pilot) {
		this.pilot = pilot;
	}

	@Override
	public void setPreviousRun(final OSMAC<ILabeledDataset<?>> previousRun) {
		this.previousRun = previousRun;
	}

	/**
	 * Creates a fully configured OSMAC sampling algorithm.
	 *
	 * @param sampleSize desired size of the sample to be drawn
	 * @param inputDataset dataset the sample will be drawn from; must be non-null and non-empty
	 * @param random source of randomness to make samples reproducible
	 * @return the configured sampling algorithm
	 * @throws NullPointerException if the dataset is null or no pilot has been configured
	 * @throws IllegalArgumentException if the dataset is empty
	 */
	@Override
	public OSMAC<ILabeledDataset<?>> getAlgorithm(final int sampleSize, final ILabeledDataset<?> inputDataset, final Random random) {
		/* Messages added so that a failed precondition is diagnosable (consistent with LocalCaseControlSamplingFactory). */
		Objects.requireNonNull(inputDataset, "Cannot create OSMAC for a null dataset.");
		Objects.requireNonNull(this.pilot, "No pilot has been defined.");
		if (inputDataset.isEmpty()) {
			throw new IllegalArgumentException("Cannot create OSMAC for an empty dataset.");
		}
		SimpleRandomSamplingFactory<ILabeledDataset<?>> subSampleFactory = new SimpleRandomSamplingFactory<>();
		subSampleFactory.setRandom(random);
		OSMAC<ILabeledDataset<?>> osmac = new OSMAC<>(random, inputDataset, subSampleFactory, this.preSampleSize, this.pilot);
		/* Re-use acceptance thresholds from a previous run (if any) to avoid recomputation. */
		if (this.previousRun != null && this.previousRun.getAcceptanceThresholds() != null) {
			osmac.setAcceptanceTresholds(this.previousRun.getAcceptanceThresholds());
		}
		osmac.setSampleSize(sampleSize);
		return osmac;
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/factories/SimpleRandomSamplingFactory.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories;

import java.util.Random;

import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;

import ai.libs.jaicore.ml.core.filter.sampling.inmemory.SimpleRandomSampling;
import ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories.interfaces.ISamplingAlgorithmFactory;

/**
 * Factory for plain uniform random sampling without replacement semantics beyond
 * what {@link SimpleRandomSampling} itself provides.
 *
 * @param <D> type of the labeled dataset to sample from
 */
public class SimpleRandomSamplingFactory<D extends ILabeledDataset<?>> extends ASampleAlgorithmFactory<D, SimpleRandomSampling<D>> implements ISamplingAlgorithmFactory<D, SimpleRandomSampling<D>> {

	/**
	 * Creates a fully configured simple random sampling algorithm.
	 *
	 * @param sampleSize desired size of the sample to be drawn
	 * @param inputDataset dataset the sample will be drawn from
	 * @param random source of randomness to make samples reproducible
	 * @return the configured sampling algorithm
	 */
	@Override
	public SimpleRandomSampling<D> getAlgorithm(final int sampleSize, final D inputDataset, final Random random) {
		final SimpleRandomSampling<D> algorithm = new SimpleRandomSampling<>(random, inputDataset);
		algorithm.setSampleSize(sampleSize);
		return algorithm;
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/factories/StratifiedSamplingFactory.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories;

import java.util.Random;

import org.api4.java.ai.ml.core.dataset.IDataset;

import ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories.interfaces.ISamplingAlgorithmFactory;
import ai.libs.jaicore.ml.core.filter.sampling.inmemory.stratified.sampling.IStratifier;
import ai.libs.jaicore.ml.core.filter.sampling.inmemory.stratified.sampling.StratifiedSampling;

/**
 * Factory for {@link StratifiedSampling} with an externally supplied stratification technique.
 *
 * @param <D> type of the dataset to sample from
 */
public class StratifiedSamplingFactory<D extends IDataset<?>> extends ASampleAlgorithmFactory<D, StratifiedSampling<D>> implements ISamplingAlgorithmFactory<D, StratifiedSampling<D>> {

	/* The stratifier deciding which stratum each datapoint belongs to. */
	private final IStratifier stratificationTechnique;

	/**
	 * @param stratificationTechniqe the stratification technique to be used by created algorithms
	 */
	public StratifiedSamplingFactory(final IStratifier stratificationTechniqe) {
		this.stratificationTechnique = stratificationTechniqe;
	}

	/**
	 * Creates a fully configured stratified sampling algorithm.
	 *
	 * @param sampleSize desired size of the sample to be drawn
	 * @param inputDataset dataset the sample will be drawn from
	 * @param random source of randomness to make samples reproducible
	 * @return the configured sampling algorithm
	 */
	@Override
	public StratifiedSampling<D> getAlgorithm(final int sampleSize, final D inputDataset, final Random random) {
		final StratifiedSampling<D> algorithm = new StratifiedSampling<>(this.stratificationTechnique, random, inputDataset);
		algorithm.setSampleSize(sampleSize);
		return algorithm;
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/factories/SystematicSamplingFactory.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories;

import java.util.Comparator;
import java.util.Random;

import org.api4.java.ai.ml.core.dataset.IInstance;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;

import ai.libs.jaicore.ml.core.filter.sampling.inmemory.SystematicSampling;
import ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories.interfaces.IRerunnableSamplingAlgorithmFactory;

/**
 * Factory for {@link SystematicSampling}. Optionally sorts the datapoints with a
 * custom comparator before sampling and can re-use the sorted dataset of a previous run.
 *
 * @param <D> type of the labeled dataset to sample from
 */
public class SystematicSamplingFactory<D extends ILabeledDataset<?>> extends ASampleAlgorithmFactory<D, SystematicSampling<D>> implements IRerunnableSamplingAlgorithmFactory<D, SystematicSampling<D>> {

	private Comparator<IInstance> datapointComparator = null;
	private SystematicSampling<D> previousRun = null;

	/**
	 * Sets a custom comparator that will be used to sort the datapoints before sampling.
	 *
	 * @param datapointComparator comparator for two datapoints
	 */
	public void setDatapointComparator(final Comparator<IInstance> datapointComparator) {
		this.datapointComparator = datapointComparator;
	}

	@Override
	public void setPreviousRun(final SystematicSampling<D> previousRun) {
		this.previousRun = previousRun;
	}

	/**
	 * Creates a fully configured systematic sampling algorithm.
	 *
	 * @param sampleSize desired size of the sample to be drawn
	 * @param inputDataset dataset the sample will be drawn from
	 * @param random source of randomness to make samples reproducible
	 * @return the configured sampling algorithm
	 */
	@Override
	public SystematicSampling<D> getAlgorithm(final int sampleSize, final D inputDataset, final Random random) {
		final SystematicSampling<D> sampling = this.datapointComparator != null
				? new SystematicSampling<>(random, this.datapointComparator, inputDataset)
				: new SystematicSampling<>(random, inputDataset);
		sampling.setSampleSize(sampleSize);
		/* Re-use the already sorted dataset of a previous run (if any) to avoid re-sorting. */
		if (this.previousRun != null && this.previousRun.getSortedDataset() != null) {
			sampling.setSortedDataset(this.previousRun.getSortedDataset());
		}
		return sampling;
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/factories
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/factories/interfaces/IRerunnableSamplingAlgorithmFactory.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories.interfaces;

import org.api4.java.ai.ml.core.dataset.IDataset;

import ai.libs.jaicore.ml.core.filter.sampling.inmemory.ASamplingAlgorithm;

/**
 * Extension of the ISamplingAlgorithmFactory for sampling algorithms that can
 * re-use information from a previous run of the sampling algorithm.
 *
 * @author Lukas Brandt
 * @param <D> Type of the dataset the sampling algorithm operates on.
 * @param <A> Type of the sampling algorithm that will be created.
 */
public interface IRerunnableSamplingAlgorithmFactory<D extends IDataset<?>, A extends ASamplingAlgorithm<D>> extends ISamplingAlgorithmFactory<D, A> {

	/**
	 * The previous run of the sampling algorithm, if one occurred, can be set
	 * here so the factory can hand its data over to newly created algorithms.
	 *
	 * @param previousRun Algorithm object of the previous run of the sampling
	 *            algorithm.
	 */
	public void setPreviousRun(A previousRun);

}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/factories
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/factories/interfaces/ISamplingAlgorithmFactory.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories.interfaces;

import java.util.Random;

import org.api4.java.ai.ml.core.dataset.IDataset;

import ai.libs.jaicore.ml.core.filter.sampling.inmemory.ASamplingAlgorithm;

/**
 * Interface for a factory, which creates a sampling algorithm.
 *
 * @author Lukas Brandt
 * @param <D> Type of the dataset the sampling algorithm operates on.
 * @param <A> Type of the sampling algorithm that will be created.
 */
public interface ISamplingAlgorithmFactory<D extends IDataset<?>, A extends ASamplingAlgorithm<D>> {

	/**
	 * After the necessary config is done, this method returns a fully configured
	 * instance of a sampling algorithm.
	 *
	 * @param sampleSize Desired size of the sample that will be created.
	 * @param inputDataset Dataset where the sample will be drawn from.
	 * @param random Random object to make samples reproducible.
	 * @return Configured sampling algorithm object.
	 */
	public A getAlgorithm(int sampleSize, D inputDataset, Random random);

	/**
	 * After the necessary config is done, this method returns a fully configured
	 * instance of a sampling algorithm.
	 *
	 * Here, it is assumed that random aspects or sample sizes have already been
	 * defined before.
	 *
	 * @param inputDataset Dataset where the sample will be drawn from.
	 * @return Configured sampling algorithm object.
	 **/
	public A getAlgorithm(D inputDataset);

}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/stratified
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/stratified/sampling/AttributeBasedStratifier.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory.stratified.sampling;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;

import org.api4.java.ai.ml.core.dataset.IDataset;
import org.api4.java.ai.ml.core.dataset.IInstance;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance;
import org.api4.java.common.control.ILoggingCustomizable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.collect.Sets;

import ai.libs.jaicore.ml.core.dataset.schema.DatasetPropertyComputer;
import ai.libs.jaicore.ml.core.filter.sampling.inmemory.stratified.sampling.DiscretizationHelper.DiscretizationStrategy;

/**
 * This class is responsible for computing the amount of strati in
 * attribute-based stratified sampling and assigning elements to the strati.
 *
 * Each stratum corresponds to one element of the cartesian product over the
 * (possibly discretized) value sets of the configured attributes.
 *
 * @author Felix Weiland
 */
public class AttributeBasedStratifier implements IStratifier, ILoggingCustomizable {

	private Logger logger = LoggerFactory.getLogger(AttributeBasedStratifier.class);

	/** Default strategy for discretization */
	private static final DiscretizationStrategy DEFAULT_DISCRETIZATION_STRATEGY = DiscretizationStrategy.EQUAL_SIZE;

	private final DiscretizationHelper discretizationHelper = new DiscretizationHelper();

	/** Default number of categories to be used for discretization */
	private static final int DEFAULT_DISCRETIZATION_CATEGORY_AMOUNT = 5;

	/** Indices of attributes that have to be taken into account for stratum assignment */
	private List<Integer> attributeIndices;

	/** Map from attribute values to stratum id */
	private Map<List<Object>, Integer> stratumIDs;

	/** Number of CPU cores to be used */
	private int numCPUs = 1;

	/** The data set which has to be sampled */
	private IDataset<?> dataset;

	/** Cached number of attributes of the dataset (label column has index numAttributes) */
	private int numAttributes;

	/** Policies for discretization */
	private Map<Integer, AttributeDiscretizationPolicy> discretizationPolicies;

	/** The discretization strategy selected by the user */
	private DiscretizationStrategy discretizationStrategy;

	/** The number of categories for discretization selected by the user */
	private int numberOfCategories;

	/**
	 * Creates a stratifier with default discretization strategy and category amount.
	 * NOTE(review): this constructor leaves attributeIndices unset; calling
	 * {@link #createStrati(IDataset)} afterwards will fail with an NPE — confirm intended usage.
	 */
	public AttributeBasedStratifier() {
		super();
		this.discretizationStrategy = DEFAULT_DISCRETIZATION_STRATEGY;
		this.numberOfCategories = DEFAULT_DISCRETIZATION_CATEGORY_AMOUNT;
	}

	/**
	 * @param attributeIndices indices of the attributes defining the strata; index == numAttributes denotes the label column
	 */
	public AttributeBasedStratifier(final List<Integer> attributeIndices) {
		this(attributeIndices, null);
		this.discretizationStrategy = DEFAULT_DISCRETIZATION_STRATEGY;
		this.numberOfCategories = DEFAULT_DISCRETIZATION_CATEGORY_AMOUNT;
	}

	/**
	 * @param attributeIndices indices of the attributes defining the strata
	 * @param discretizationStrategy strategy used to discretize numeric attributes
	 * @param numberOfCategories number of categories for discretization
	 */
	public AttributeBasedStratifier(final List<Integer> attributeIndices, final DiscretizationStrategy discretizationStrategy, final int numberOfCategories) {
		this(attributeIndices, null);
		this.discretizationStrategy = discretizationStrategy;
		this.numberOfCategories = numberOfCategories;
	}

	/**
	 * @param attributeIndices indices of the attributes defining the strata; must be non-empty
	 * @param discretizationPolicies explicit discretization policies per attribute index; may be null (defaults are computed later)
	 * @throws IllegalArgumentException if no attribute indices are provided
	 */
	public AttributeBasedStratifier(final List<Integer> attributeIndices, final Map<Integer, AttributeDiscretizationPolicy> discretizationPolicies) {
		super();
		/* Validate attribute indices */
		if (attributeIndices == null || attributeIndices.isEmpty()) {
			throw new IllegalArgumentException("No attribute indices are provided!");
		}
		this.attributeIndices = attributeIndices;
		this.discretizationPolicies = discretizationPolicies;
		this.logger.info("Created assigner. Attributes to be discretized: {}", discretizationPolicies == null ? "none" : discretizationPolicies.keySet());
	}

	/**
	 * Computes the strata for the given dataset. May only be called once per instance.
	 *
	 * @param dataset the dataset whose attribute value combinations define the strata
	 * @return the number of strata created
	 * @throws IllegalStateException if strati have already been created
	 * @throws IllegalArgumentException if any configured attribute index is invalid for the dataset
	 */
	@Override
	public int createStrati(final IDataset<?> dataset) {
		this.logger.debug("Selecting number of strati for dataset with {} items.", dataset.size());
		if (this.dataset != null) {
			throw new IllegalStateException("Creating strati more than once is forbidden!");
		}
		this.logger.debug("init(): enter");

		/* first, conduct some consistency checks */
		Objects.requireNonNull(dataset, "Cannot create strati. Dataset is null.");
		this.dataset = dataset;
		this.numAttributes = dataset.getNumAttributes();

		/* consistency check of attribute indices (index == n addresses the label column of a labeled dataset) */
		int n = dataset.getNumAttributes();
		for (int i : this.attributeIndices) {
			if (i < 0) {
				throw new IllegalArgumentException("Attribute index for stratified splits must not be negative!");
			}
			if (i > n) {
				throw new IllegalArgumentException("Attribute index for stratified splits must not exceed number of attributes!");
			}
			if (i == n && !(dataset instanceof ILabeledDataset)) {
				throw new IllegalArgumentException("Attribute index for stratified splits must only equal the number of attributes if the dataset is labeled, because then the label column id is the number of attributes!");
			}
		}
		this.logger.debug("Forming the strati over {} attributes.", this.attributeIndices.size());

		/* now compute the set of strati labels. There is one stratum for each element in the cartesian product of
		 * all possible combinations of (maybe discretized) values for the given attribute indices */
		DatasetPropertyComputer dpc = new DatasetPropertyComputer();
		dpc.setLoggerName(this.getLoggerName() + ".dpc");
		Map<Integer, Set<Object>> attributeValues = dpc.computeAttributeValues(dataset, this.attributeIndices, this.numCPUs);
		if (this.logger.isDebugEnabled()) {
			this.logger.debug("The values for the different attributes are: {}", attributeValues.entrySet().stream().map(e -> "\n\t" + e.getKey() + ": " + e.getValue()).collect(Collectors.toList()));
		}
		this.discretizeAttributeValues(attributeValues);
		List<Set<Object>> sets = new ArrayList<>(attributeValues.values());
		Set<List<Object>> cartesianProduct = Sets.cartesianProduct(sets);
		this.logger.info("There are {} elements in the cartesian product of the attribute values", cartesianProduct.size());

		/* now assign an ID to each stratum. We do not use a list here for more effective look-up later */
		this.logger.info("Assigning stratum numbers to elements in the cartesian product..");
		this.stratumIDs = new HashMap<>();
		int stratumCounter = 0;
		for (List<Object> tuple : cartesianProduct) {
			this.stratumIDs.put(tuple, stratumCounter++);
		}
		this.logger.info("Initialized strati assigner with {} strati.", this.stratumIDs.size());
		return this.stratumIDs.size();
	}

	/**
	 * Discretizes the numeric attribute value sets in place, computing default policies first if none were supplied.
	 *
	 * @param attributeValues map from attribute index to the set of observed values (modified in place)
	 */
	private void discretizeAttributeValues(final Map<Integer, Set<Object>> attributeValues) {
		if (this.discretizationPolicies == null) {
			this.logger.info("No discretization policies provided. Computing defaults.");
			this.discretizationPolicies = this.discretizationHelper.createDefaultDiscretizationPolicies(this.dataset, this.attributeIndices, attributeValues, this.discretizationStrategy, this.numberOfCategories);
		}
		if (!this.discretizationPolicies.isEmpty()) {
			if (this.logger.isInfoEnabled()) {
				this.logger.info("Discretizing numeric attributes using policies: {}", this.discretizationPolicies);
			}
			this.discretizationHelper.discretizeAttributeValues(this.discretizationPolicies, attributeValues);
		}
		this.logger.info("discretizeAttributeValues(): leave"); // FIX: trace message previously referred to computeAttributeValues()
	}

	@Override
	public void setNumCPUs(final int numberOfCPUs) {
		if (numberOfCPUs < 1) {
			/* FIX: the check requires a positive value; the message previously said "nonnegative" (which would allow 0) */
			throw new IllegalArgumentException("Number of CPU cores must be positive");
		}
		this.numCPUs = numberOfCPUs;
	}

	@Override
	public int getNumCPUs() {
		return this.numCPUs;
	}

	/**
	 * Determines the stratum of a single datapoint based on its (possibly discretized) attribute values.
	 *
	 * @param datapoint the datapoint to assign
	 * @return the id of the stratum the datapoint belongs to
	 * @throws IllegalStateException if {@link #createStrati(IDataset)} has not been called, or no stratum matches
	 * @throws IllegalArgumentException if the datapoint has no label although the label column is a stratification attribute
	 */
	@Override
	public int getStratum(final IInstance datapoint) {
		if (this.dataset == null) {
			throw new IllegalStateException("The technique has not been initialized yet.");
		}

		/* Compute concrete attribute values relevant for the stratum for the particular instance */
		List<Object> instanceAttributeValues = new ArrayList<>(this.attributeIndices.size());
		for (int i = 0; i < this.attributeIndices.size(); i++) {
			int attributeIndex = this.attributeIndices.get(i);
			Object value;
			/* Has value to be discretized? */
			if (this.toBeDiscretized(attributeIndex)) {
				Object raw;
				if (attributeIndex == this.dataset.getNumAttributes()) { // this can only happen for labeled instances
					raw = ((ILabeledInstance) datapoint).getLabel();
				} else {
					raw = datapoint.getAttributeValue(attributeIndex);
				}
				value = this.discretizationHelper.discretize((double) raw, this.discretizationPolicies.get(attributeIndex));
				Objects.requireNonNull(value);
			} else {
				if (attributeIndex == this.numAttributes) { // this can only happen for labeled instances
					value = ((ILabeledInstance) datapoint).getLabel();
					if (value == null) {
						throw new IllegalArgumentException("Cannot assign data point " + datapoint + " to any stratum, because it has no label.");
					}
				} else {
					value = datapoint.getAttributeValue(attributeIndex);
					Objects.requireNonNull(value);
				}
			}
			instanceAttributeValues.add(value);
		}

		/* FIX: guard the lookup explicitly; auto-unboxing a missing entry previously raised an opaque NullPointerException */
		Integer stratum = this.stratumIDs.get(instanceAttributeValues);
		if (stratum == null) {
			throw new IllegalStateException("No stratum is defined for attribute value combination " + instanceAttributeValues + ".");
		}
		this.logger.debug("Attribute values are: {}. Corresponding stratum is: {}", instanceAttributeValues, stratum);
		return stratum;
	}

	/** @return whether a discretization policy exists for the given attribute index */
	private boolean toBeDiscretized(final int index) {
		return this.discretizationPolicies.containsKey(index);
	}

	@Override
	public String getLoggerName() {
		return this.logger.getName();
	}

	@Override
	public void setLoggerName(final String name) {
		this.logger = LoggerFactory.getLogger(name);
		this.discretizationHelper.setLoggerName(name + ".discretizer");
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/stratified
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/stratified/sampling/AttributeDiscretizationPolicy.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory.stratified.sampling;

import java.util.List;
import java.util.Objects;

import org.apache.commons.math3.geometry.euclidean.oned.Interval;

/**
 * Value holder describing how a numeric attribute is discretized: one interval per category.
 * Equality and hash code are determined solely by the interval list.
 */
public class AttributeDiscretizationPolicy {

	private List<Interval> intervals;

	/**
	 * @param intervals the intervals defining the discretization categories
	 */
	public AttributeDiscretizationPolicy(final List<Interval> intervals) {
		super();
		this.intervals = intervals;
	}

	public List<Interval> getIntervals() {
		return this.intervals;
	}

	public void setIntervals(final List<Interval> intervals) {
		this.intervals = intervals;
	}

	@Override
	public int hashCode() {
		/* Objects.hash(intervals) computes 31 + intervals.hashCode() (0 for null), exactly matching the previous hand-rolled implementation. */
		return Objects.hash(this.intervals);
	}

	@Override
	public boolean equals(final Object obj) {
		if (this == obj) {
			return true;
		}
		if (obj == null || this.getClass() != obj.getClass()) {
			return false;
		}
		return Objects.equals(this.intervals, ((AttributeDiscretizationPolicy) obj).intervals);
	}

	@Override
	public String toString() {
		return "AttributeDiscretizationPolicy [intervals=" + this.intervals + "]";
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/stratified
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/stratified/sampling/ClusterStratiAssigner.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory.stratified.sampling;

import java.util.Arrays;
import java.util.List;
import java.util.Objects;

import org.apache.commons.math3.ml.clustering.CentroidCluster;
import org.apache.commons.math3.ml.clustering.Clusterable;
import org.apache.commons.math3.ml.distance.DistanceMeasure;
import org.api4.java.ai.ml.core.dataset.IDataset;
import org.api4.java.ai.ml.core.dataset.IInstance;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Base class for stratifiers that derive strata from a clustering of the dataset:
 * each cluster is one stratum, and a datapoint's stratum is the index of the cluster
 * containing a point with identical coordinates.
 */
public abstract class ClusterStratiAssigner implements IStratifier {

	private static final Logger LOG = LoggerFactory.getLogger(ClusterStratiAssigner.class);

	private IDataset<?> dataset;
	protected int randomSeed;
	protected DistanceMeasure distanceMeasure;
	private List<CentroidCluster<Clusterable>> clusters;

	/**
	 * Registers the dataset to be stratified after validating that all its elements are clusterable.
	 *
	 * @param dataset the dataset to stratify; must be non-null, non-empty, and contain only clusterable elements
	 */
	public void setDataset(final IDataset<?> dataset) {
		Objects.requireNonNull(dataset);
		if (dataset.isEmpty()) {
			throw new IllegalArgumentException("Cannot compute strati for empty dataset.");
		}
		/* If the declared element type is not clusterable, fall back to checking every element individually. */
		if (!Clusterable.class.isAssignableFrom(dataset.getClassOfInstances()) && !dataset.stream().allMatch(Clusterable.class::isInstance)) {
			throw new IllegalArgumentException("Dataset does contain elements that are not clusterable elements, but only elements of class " + dataset.getClassOfInstances() + ".");
		}
		this.dataset = dataset;
	}

	/**
	 * Determines the stratum of the datapoint by locating the cluster that contains
	 * a point with coordinates equal to the datapoint's.
	 *
	 * @param datapoint the datapoint to assign; must stem from the registered dataset
	 * @return the index of the cluster containing the datapoint
	 */
	@Override
	public int getStratum(final IInstance datapoint) {
		if (this.dataset == null) {
			throw new IllegalStateException("No dataset has been set, so no strati have been built!");
		}
		if (!this.dataset.contains(datapoint)) {
			throw new IllegalArgumentException("Given datapoint " + datapoint + " is not in the original dataset with " + this.dataset.size() + " entries.");
		}
		final double[] coordinates = datapoint.getPoint();
		for (int clusterIndex = 0; clusterIndex < this.clusters.size(); clusterIndex++) {
			for (final Clusterable member : this.clusters.get(clusterIndex).getPoints()) {
				if (Arrays.equals(coordinates, member.getPoint())) {
					return clusterIndex;
				}
			}
		}
		throw new IllegalStateException("Datapoint was not found in any cluster. This should not happen.");
	}

	@Override
	public void setNumCPUs(final int numberOfCPUs) {
		LOG.warn("setNumCPUs() is not supported for this class");
	}

	@Override
	public int getNumCPUs() {
		return 1;
	}

	public List<CentroidCluster<Clusterable>> getClusters() {
		return this.clusters;
	}

	protected void setClusters(final List<CentroidCluster<Clusterable>> clusters) {
		this.clusters = clusters;
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/stratified
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/stratified/sampling/DiscretizationHelper.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory.stratified.sampling;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.commons.math3.geometry.euclidean.oned.Interval;
import org.apache.commons.math3.geometry.partitioning.Region.Location;
import org.api4.java.ai.ml.core.dataset.IDataset;
import org.api4.java.ai.ml.core.dataset.schema.attribute.IAttribute;
import org.api4.java.ai.ml.core.dataset.schema.attribute.INumericAttribute;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;
import org.api4.java.common.control.ILoggingCustomizable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This helper class provides methods that are required in order to discretize
 * numeric attributes.
 *
 * (Fixed: the previous javadoc documented a type parameter {@code <I>} although this class is not generic.)
 *
 * @author Felix Weiland
 */
public class DiscretizationHelper implements ILoggingCustomizable {

	private Logger logger = LoggerFactory.getLogger(DiscretizationHelper.class);

	/** Strategies for computing default discretization policies. */
	public enum DiscretizationStrategy {
		EQUAL_LENGTH, EQUAL_SIZE
	}

	public DiscretizationHelper() {
		super();
	}

	/**
	 * This method creates a default discretization policy for each numeric
	 * attribute in the attributes that have to be considered for stratum
	 * assignment. Attributes with no more distinct values than categories are skipped.
	 *
	 * @param dataset The data set that has to be sampled
	 * @param indices Indices of the attributes that have to be considered for stratum assignment
	 * @param attributeValues Values of the relevant attributes
	 * @param discretizationStrategy The discretization strategy that has to be used
	 * @param numberOfCategories The number of categories to which the numeric values have to be assigned
	 * @return map from attribute index to the computed discretization policy
	 */
	public Map<Integer, AttributeDiscretizationPolicy> createDefaultDiscretizationPolicies(final IDataset<?> dataset, final List<Integer> indices, final Map<Integer, Set<Object>> attributeValues,
			final DiscretizationStrategy discretizationStrategy, final int numberOfCategories) {
		Map<Integer, AttributeDiscretizationPolicy> discretizationPolicies = new HashMap<>();

		/* Only consider numeric attributes */
		Set<Integer> indicesToConsider = this.getNumericIndicesFromDataset(dataset);
		indicesToConsider.retainAll(indices);

		for (int index : indicesToConsider) {
			/* Get the (distinct) values in sorted order */
			List<Double> numericValues = this.getSortedNumericValues(attributeValues, index);

			/* No discretization needed if there are more categories than values */
			if (numericValues.size() <= numberOfCategories) {
				this.logger.info("No discretization policy for attribute {} needed", index);
				continue;
			}
			switch (discretizationStrategy) {
			case EQUAL_SIZE:
				discretizationPolicies.put(index, this.equalSizePolicy(numericValues, numberOfCategories));
				break;
			case EQUAL_LENGTH:
				discretizationPolicies.put(index, this.equalLengthPolicy(numericValues, numberOfCategories));
				break;
			default:
				throw new IllegalArgumentException(String.format("Invalid strategy: %s", discretizationStrategy));
			}
		}
		return discretizationPolicies;
	}

	/**
	 * Creates an equal size policy for the given values with respect to the given
	 * number of categories. An equal size policy is a policy where the length of
	 * the intervals is chosen such that in each interval there are equally many
	 * values.
	 *
	 * @param numericValues Distinct attribute values in ascending order
	 * @param numberOfCategories Number of categories
	 * @return The created discretization policy consisting of one interval per category
	 * @throws IllegalArgumentException if no values are provided
	 */
	public AttributeDiscretizationPolicy equalSizePolicy(final List<Double> numericValues, final int numberOfCategories) {
		if (numericValues.isEmpty()) {
			throw new IllegalArgumentException("No values provided");
		}
		List<Interval> intervals = new ArrayList<>();
		int stepwidth = numericValues.size() / numberOfCategories;
		int limit = Math.min(numberOfCategories, numericValues.size());
		for (int i = 0; i < limit; i++) {
			int lower = i * stepwidth;
			int upper;
			if (i == limit - 1) {
				/* Take the rest of the values */
				upper = numericValues.size() - 1;
			} else {
				upper = ((i + 1) * stepwidth) - 1;
			}
			intervals.add(new Interval(numericValues.get(lower), numericValues.get(upper)));
		}
		return new AttributeDiscretizationPolicy(intervals);
	}

	/**
	 * Creates an equal length policy for the given values with respect to the given
	 * number of categories. An equal length policy is a policy where the length of
	 * the intervals is the same for all intervals.
	 *
	 * @param numericValues Distinct attribute values in ascending order
	 * @param numberOfCategories Number of categories
	 * @return The created discretization policy consisting of one interval per category
	 * @throws IllegalArgumentException if no values are provided
	 */
	public AttributeDiscretizationPolicy equalLengthPolicy(final List<Double> numericValues, final int numberOfCategories) {
		/* FIX: fail fast with the same message as equalSizePolicy instead of a raw NoSuchElementException from Collections.max */
		if (numericValues.isEmpty()) {
			throw new IllegalArgumentException("No values provided");
		}
		List<Interval> intervals = new ArrayList<>();
		double max = Collections.max(numericValues);
		double min = Collections.min(numericValues);
		double stepwidth = Math.abs(max - min) / numberOfCategories;
		for (int i = 0; i < numberOfCategories; i++) {
			double lower = min + (i * stepwidth);
			double upper = min + (i + 1) * stepwidth;
			intervals.add(new Interval(lower, upper));
		}
		return new AttributeDiscretizationPolicy(intervals);
	}

	/**
	 * Returns an ascending list of attribute values for the given attribute.
	 *
	 * @param attributeValues map from attribute index to observed values
	 * @param attributeIndex index of the attribute of interest
	 * @return the values of the attribute, sorted ascendingly
	 */
	private List<Double> getSortedNumericValues(final Map<Integer, Set<Object>> attributeValues, final int attributeIndex) {
		Set<Object> values = attributeValues.get(attributeIndex);
		List<Double> toReturn = new ArrayList<>();
		values.forEach(v -> toReturn.add((Double) v));
		Collections.sort(toReturn);
		return toReturn;
	}

	/**
	 * Returns the set of attribute indices belonging to numeric attributes.
	 * For a labeled dataset with a numeric label, the label column is included under index numAttributes.
	 *
	 * @param dataset the dataset whose schema is inspected
	 * @return indices of the numeric attributes
	 */
	private Set<Integer> getNumericIndicesFromDataset(final IDataset<?> dataset) {
		Set<Integer> numericAttributes = new HashSet<>();
		List<IAttribute> attributeTypes = new ArrayList<>(dataset.getListOfAttributes());
		if (dataset instanceof ILabeledDataset && ((ILabeledDataset<?>) dataset).getLabelAttribute() instanceof INumericAttribute) {
			attributeTypes.add(((ILabeledDataset<?>) dataset).getLabelAttribute());
		}
		for (int i = 0; i < attributeTypes.size(); i++) {
			IAttribute attributeType = attributeTypes.get(i);
			if (attributeType instanceof INumericAttribute) {
				numericAttributes.add(i);
			}
		}
		return numericAttributes;
	}

	/**
	 * Discretizes the given attribute values with respect to the provided policies,
	 * replacing each value set by the set of its category indices (in place).
	 *
	 * @param discretizationPolicies map from attribute index to discretization policy
	 * @param attributeValues map from attribute index to observed values (modified in place)
	 */
	protected void discretizeAttributeValues(final Map<Integer, AttributeDiscretizationPolicy> discretizationPolicies, final Map<Integer, Set<Object>> attributeValues) {
		Set<Integer> numericIndices = discretizationPolicies.keySet();
		for (int index : numericIndices) {
			Set<Object> originalValues = attributeValues.get(index);
			Set<Object> discretizedValues = new HashSet<>();
			for (Object value : originalValues) {
				double d = (double) value;
				discretizedValues.add(this.discretize(d, discretizationPolicies.get(index)));
			}
			this.logger.info("Attribute index {}: Reduced values from {} to {}", index, originalValues.size(), discretizedValues.size());
			attributeValues.put(index, discretizedValues);
		}
	}

	/**
	 * Discretizes the particular provided value. Discretization in this case means
	 * to replace the original value by a categorical value. The categorical value
	 * is simply the index of the interval the value was assigned to.
	 *
	 * @param value The (numeric) value to be discretized
	 * @param policy The policy that has to be used for discretization
	 * @return index of the interval covering the value
	 * @throws IllegalStateException if no interval of the policy covers the value
	 */
	protected int discretize(final double value, final AttributeDiscretizationPolicy policy) {
		List<Interval> intervals = policy.getIntervals();
		/* FIX: indexed loop; the previous for-each called intervals.indexOf(i) on a hit, an avoidable extra O(n) scan */
		for (int i = 0; i < intervals.size(); i++) {
			if (intervals.get(i).checkPoint(value, 0) != Location.OUTSIDE) {
				return i;
			}
		}
		throw new IllegalStateException(String.format("Policy does not cover value %f", value));
	}

	@Override
	public String getLoggerName() {
		return this.logger.getName();
	}

	@Override
	public void setLoggerName(final String name) {
		this.logger = LoggerFactory.getLogger(name);
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/stratified
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/stratified/sampling/GMeansStratifier.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory.stratified.sampling; import java.util.List; import org.apache.commons.math3.ml.clustering.Clusterable; import org.apache.commons.math3.ml.distance.DistanceMeasure; import org.apache.commons.math3.ml.distance.ManhattanDistance; import org.api4.java.ai.ml.core.dataset.IDataset; import ai.libs.jaicore.basic.sets.ListView; import ai.libs.jaicore.ml.clustering.learner.GMeans; /** * Combined strati amount selector and strati assigner via g-means. IT can be * used in 3 combinations: * * 1) Amount Selector and Strati Assigner: A g-means clustering is initially * performed to select a strati amount via the amount of found clusters and to * assign datapoints with this clusters afterwards. * * 2) Amount Selector: A g-means clustering is initially performed to select a * suitable strati amount with the amount of found clusters. 3) Strati Assigner: * Since the amount of strati is selected with another component g-means cannot * be used and k-means will be used to be conform with the given strati amount. * * It is recommended to use combination 1, because only using this component for * only one of the two tasks could yield in inconsistent results. * * @author Lukas Brandt */ public class GMeansStratifier extends ClusterStratiAssigner implements IStratifier { private int maxInnerIterations = 100; /** * Constructor for GMeansStratiAmountSelectorAndAssigner with Manhattan * distanceMeasure as a default. * * @param randomSeed * Seed for random numbers. */ public GMeansStratifier(final int randomSeed) { this.randomSeed = randomSeed; this.distanceMeasure = new ManhattanDistance(); } /** * Constructor for GMeansStratiAmountSelectorAndAssigner with custom * distanceMeasure. * * @param distanceMeasure * Distance measure for datapoints, for example Manhattan or * Euclidian. * @param randomSeed * Seed for random numbers. 
*/ public GMeansStratifier(final DistanceMeasure distanceMeasure, final int randomSeed) { this.randomSeed = randomSeed; this.distanceMeasure = distanceMeasure; } @Override public int createStrati(final IDataset<?> dataset) { // Perform g-means to get a fitting k and the corresponding clusters. this.setDataset(dataset); List<Clusterable> cDataset = new ListView<>(dataset); GMeans<Clusterable> clusterer = new GMeans<>(cDataset, this.distanceMeasure, this.maxInnerIterations, this.randomSeed); this.setClusters(clusterer.cluster()); return this.getClusters().size(); } }
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/stratified
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/stratified/sampling/IStratifier.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory.stratified.sampling; import org.api4.java.ai.ml.core.dataset.IDataset; import org.api4.java.ai.ml.core.dataset.IInstance; import org.api4.java.common.control.IParallelizable; /** * @author Felix Mohr */ public interface IStratifier extends IParallelizable { /** * Prepares the stratification technique but does not assign instances to strati. * * @param dataset * @return The number of strati for the given dataset */ public int createStrati(IDataset<?> dataset); /** * Determines to which stratum this instance belongs * * @param instance * @return id of stratum */ public int getStratum(IInstance instance); }
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/stratified
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/stratified/sampling/KMeansStratifier.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory.stratified.sampling; import java.util.List; import org.apache.commons.math3.ml.clustering.Clusterable; import org.apache.commons.math3.ml.clustering.KMeansPlusPlusClusterer; import org.apache.commons.math3.ml.distance.DistanceMeasure; import org.apache.commons.math3.random.JDKRandomGenerator; import org.api4.java.ai.ml.core.dataset.IDataset; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Cluster the data set with k-means into k Clusters, where each cluster stands for one stratum. The datapoint assignment is performed with a lookup in the clusters. * * @author Lukas Brandt */ public class KMeansStratifier extends ClusterStratiAssigner { private Logger logger = LoggerFactory.getLogger(KMeansStratifier.class); private final int numberOfStrati; /** * Constructor for KMeansStratiAssigner. * * @param distanceMeasure * Distance measure for datapoints, for example Manhattan or Euclidian. * @param randomSeed * Seed for random numbers. */ public KMeansStratifier(final int numberOfStrati, final DistanceMeasure distanceMeasure, final int randomSeed) { this.numberOfStrati = numberOfStrati; this.randomSeed = randomSeed; this.distanceMeasure = distanceMeasure; } @Override public int createStrati(final IDataset<?> dataset) { this.setDataset(dataset); // Perform initial Clustering of the dataset. JDKRandomGenerator rand = new JDKRandomGenerator(); rand.setSeed(this.randomSeed); @SuppressWarnings("unchecked") List<Clusterable> cDataset = (List<Clusterable>) dataset; KMeansPlusPlusClusterer<Clusterable> clusterer = new KMeansPlusPlusClusterer<>(this.numberOfStrati, -1, this.distanceMeasure, rand); this.logger.info("Clustering dataset with {} instances.", dataset.size()); this.setClusters(clusterer.cluster(cDataset)); this.logger.info("Finished clustering"); return this.numberOfStrati; } }
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/stratified
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/stratified/sampling/LabelBasedStratifiedSampling.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory.stratified.sampling; import java.util.Arrays; import java.util.Random; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset; public class LabelBasedStratifiedSampling<D extends ILabeledDataset<?>> extends StratifiedSampling<D> { public LabelBasedStratifiedSampling(final Random random, final D input) { this(new AttributeBasedStratifier(Arrays.asList(input.getNumAttributes())), random, input); } private LabelBasedStratifiedSampling(final AttributeBasedStratifier assigner, final Random random, final D input) { super(assigner, random, input); } }
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/stratified
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/sampling/inmemory/stratified/sampling/StratifiedSampling.java
package ai.libs.jaicore.ml.core.filter.sampling.inmemory.stratified.sampling;

import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Random;

import org.api4.java.ai.ml.core.dataset.IDataset;
import org.api4.java.ai.ml.core.dataset.IInstance;
import org.api4.java.ai.ml.core.exception.DatasetCreationException;
import org.api4.java.algorithm.events.IAlgorithmEvent;
import org.api4.java.algorithm.exceptions.AlgorithmException;
import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException;
import org.api4.java.algorithm.exceptions.AlgorithmTimeoutedException;
import org.api4.java.common.control.ILoggingCustomizable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ai.libs.jaicore.ml.core.dataset.DatasetDeriver;
import ai.libs.jaicore.ml.core.filter.sampling.SampleElementAddedEvent;
import ai.libs.jaicore.ml.core.filter.sampling.inmemory.ASamplingAlgorithm;
import ai.libs.jaicore.ml.core.filter.sampling.inmemory.SimpleRandomSampling;
import ai.libs.jaicore.ml.core.filter.sampling.inmemory.WaitForSamplingStepEvent;

/**
 * Implementation of Stratified Sampling: Divide dataset into strati and sample
 * from each of these.
 *
 * The algorithm runs as a state machine (see {@code nextWithException}):
 * CREATED prepares one {@link DatasetDeriver} per stratum, the first ACTIVE
 * step assigns every datapoint to its stratum, the second ACTIVE step draws a
 * proportional simple random sample from each stratum, and the third ACTIVE
 * step terminates.
 *
 * @author Lukas Brandt
 */
public class StratifiedSampling<D extends IDataset<?>> extends ASamplingAlgorithm<D> {

	private Logger logger = LoggerFactory.getLogger(StratifiedSampling.class);
	// determines the number of strati and the stratum of each instance
	private IStratifier stratificationTechnique;
	private Random random;
	// one deriver per stratum; collects the ORIGINAL dataset indices of the stratum members
	private DatasetDeriver<D>[] stratiBuilder = null;
	// true once every datapoint has been placed into its stratum
	private boolean allDatapointsAssigned = false;
	// true once the per-stratum simple random sampling has been launched
	private boolean simpleRandomSamplingStarted;

	/**
	 * Constructor for Stratified Sampling.
	 *
	 * @param stratificationTechnique
	 *            Technique used both to determine the number of strati and to
	 *            assign datapoints to strati.
	 * @param random
	 *            Random object for sampling inside of the strati.
	 * @param input
	 *            The dataset to be sampled from.
	 */
	public StratifiedSampling(final IStratifier stratificationTechnique, final Random random, final D input) {
		super(input);
		this.stratificationTechnique = stratificationTechnique;
		this.random = random;
	}

	@SuppressWarnings("unchecked")
	@Override
	public IAlgorithmEvent nextWithException() throws InterruptedException, AlgorithmException, AlgorithmTimeoutedException, AlgorithmExecutionCanceledException {
		switch (this.getState()) {
		case CREATED:
			if (!this.allDatapointsAssigned) {
				// hash is recorded to detect (accidental) modification of the input by the stratifier
				int dsHash = this.getInput().hashCode();
				this.stratificationTechnique.setNumCPUs(this.getNumCPUs());

				/* create strati builder */
				this.stratiBuilder = (DatasetDeriver<D>[]) Array.newInstance(DatasetDeriver.class, this.stratificationTechnique.createStrati(this.getInput()));
				if (this.stratiBuilder.length == 0) {
					throw new IllegalStateException("Stratification technique has not created any stratum.");
				}
				for (int i = 0; i < this.stratiBuilder.length; i++) {
					this.stratiBuilder[i] = new DatasetDeriver<>(this.getInput());
				}
				if (this.getInput().hashCode() != dsHash) {
					throw new IllegalStateException("Original dataset has been modified!");
				}
			}
			this.simpleRandomSamplingStarted = false;
			this.logger.info("Stratified sampler initialized.");
			return this.activate();
		case ACTIVE:
			if (!this.allDatapointsAssigned) {
				/* sort all points into their respective stratum */
				this.logger.info("Starting to sort all datapoints into their strati.");
				D dataset = this.getInput();
				int n = dataset.size();
				for (int i = 0; i < n; i++) {
					IInstance datapoint = dataset.get(i);
					if (i % 100 == 0) { // only check for timeout/cancel every 100 points to keep the loop cheap
						this.checkAndConductTermination();
					}
					this.logger.debug("Computing stratum for next data point {}", datapoint);
					int assignedStratum = this.stratificationTechnique.getStratum(datapoint);
					if (assignedStratum < 0 || assignedStratum >= this.stratiBuilder.length) {
						throw new AlgorithmException("No existing strati for index " + assignedStratum);
					} else {
						this.stratiBuilder[assignedStratum].add(i); // adding i is MUCH more efficient than adding datapoint
					}
					this.logger.debug("Added data point {} to stratum {}. {} datapoints remaining.", datapoint, assignedStratum, n - i - 1);
				}

				/* check number of samples */
				this.allDatapointsAssigned = true;
				int totalItemsAssigned = 0;
				for (DatasetDeriver<D> d : this.stratiBuilder) {
					this.logger.debug("Elements in stratum: {}", d.currentSizeOfTarget());
					totalItemsAssigned += d.currentSizeOfTarget();
				}
				this.logger.info("Finished stratum assignments. Assigned {} data points in total.", totalItemsAssigned);
				if (totalItemsAssigned != this.getInput().size()) {
					throw new IllegalStateException("Not all data have been collected.");
				}
				return new SampleElementAddedEvent(this);
			} else {
				if (!this.simpleRandomSamplingStarted) {
					/* Simple Random Sampling has not started yet -> Initialize one sampling thread per stratum. */
					try {
						this.startSimpleRandomSamplingForStrati();
					} catch (DatasetCreationException e) {
						throw new AlgorithmException("Could not create sample from strati.", e);
					}
					this.simpleRandomSamplingStarted = true;
					return new WaitForSamplingStepEvent(this);
				} else {
					/* Check if all threads are finished. If yes finish Stratified Sampling, wait shortly in this step otherwise. */
					this.logger.info("Stratified sampling completed.");
					return this.terminate();
				}
			}
		case INACTIVE:
			if (this.sample.size() < this.sampleSize) {
				throw new AlgorithmException("Expected sample size was not reached before termination");
			} else {
				return this.terminate();
			}
		default:
			throw new IllegalStateException("Unknown algorithm state " + this.getState());
		}
	}

	/**
	 * Calculates the necessary sample sizes and start a Simple Random Sampling
	 * Thread for each stratum.
	 *
	 * Stratum quotas are computed proportionally to the stratum sizes (floored),
	 * and the remainder up to {@code sampleSize} is distributed among randomly
	 * chosen strati, at most one extra element per stratum.
	 *
	 * @throws DatasetCreationException if a stratum or the final sample cannot be materialized
	 * @throws InterruptedException if the thread is interrupted while sampling
	 * @throws AlgorithmExecutionCanceledException if the algorithm is canceled
	 * @throws AlgorithmTimeoutedException if the timeout is exceeded
	 */
	private void startSimpleRandomSamplingForStrati() throws InterruptedException, DatasetCreationException, AlgorithmTimeoutedException, AlgorithmExecutionCanceledException {
		if (this.sampleSize == -1) {
			throw new IllegalStateException("No valid sample size specified");
		}

		/* Calculate the amount of datapoints that will be used from each strati.
		 * First, floor all fractional numbers. Then, distribute the remaining samples randomly among the strati */
		this.logger.info("Now drawing simple random elements in each stratum.");
		int[] sampleSizeForStrati = new int[this.stratiBuilder.length];
		int numSamplesTotal = 0;
		List<Integer> fillupStrati = new ArrayList<>();
		double totalInputSize = this.getInput().size();
		for (int i = 0; i < this.stratiBuilder.length; i++) {
			if (this.stratiBuilder[i].currentSizeOfTarget() < 0) {
				throw new IllegalStateException("Builder for stratum " + i + " has a negative current target size: " + this.stratiBuilder[i].currentSizeOfTarget());
			}
			int totalNumberOfElementsInStratum = this.stratiBuilder[i].currentSizeOfTarget();
			sampleSizeForStrati[i] = (int) Math.floor(totalNumberOfElementsInStratum * (this.sampleSize / totalInputSize));
			if (sampleSizeForStrati[i] < 0) {
				throw new IllegalStateException("Determined negative stratum size " + sampleSizeForStrati[i] + " for " + i + "-th stratum.");
			}
			numSamplesTotal += sampleSizeForStrati[i];
			fillupStrati.add(i);
		}
		// flooring may leave a deficit of up to stratiBuilder.length - 1 samples; fill it up randomly
		while (numSamplesTotal < this.sampleSize) {
			Collections.shuffle(fillupStrati, this.random);
			int indexForNextFillUp = fillupStrati.remove(0);
			sampleSizeForStrati[indexForNextFillUp]++;
			numSamplesTotal++;
		}
		if (numSamplesTotal != this.sampleSize) {
			throw new IllegalStateException("Number of samples is " + numSamplesTotal + " where it should be " + this.sampleSize);
		}
		int stratiSumCheck = 0;
		for (int i = 0; i < this.stratiBuilder.length; i++) {
			stratiSumCheck += sampleSizeForStrati[i];
		}
		if (stratiSumCheck != this.sampleSize) {
			throw new IllegalStateException("The total number of samples assigned within the strati is " + stratiSumCheck + ", but it should be " + this.sampleSize + ".");
		}

		/* conduct a Simple Random Sampling for each stratum */
		DatasetDeriver<D> sampleDeriver = new DatasetDeriver<>(this.getInput());
		for (int i = 0; i < this.stratiBuilder.length; i++) {
			final DatasetDeriver<D> stratumBuilder = this.stratiBuilder[i];
			D stratum = stratumBuilder.build();
			if (stratum.isEmpty()) {
				this.logger.warn("{}-th stratum is empty!", i);
			} else if (sampleSizeForStrati[i] == 0) {
				this.logger.warn("No samples for stratum {}", i);
			} else if (sampleSizeForStrati[i] == stratum.size()) {
				sampleDeriver.addIndices(stratumBuilder.getIndicesOfNewInstancesInOriginalDataset()); // add the complete stratum
			} else {
				this.checkAndConductTermination();
				SimpleRandomSampling<D> simpleRandomSampling = new SimpleRandomSampling<>(this.random, stratum);
				simpleRandomSampling.setSampleSize(sampleSizeForStrati[i]);
				this.logger.info("Setting sample size for {}-th stratus to {}", i, sampleSizeForStrati[i]);
				try {
					this.logger.debug("Calling SimpleRandomSampling");
					simpleRandomSampling.call();
					this.logger.debug("SimpleRandomSampling finished");
				} catch (InterruptedException e) {
					throw e;
				} catch (Exception e) {
					// best-effort: the size check below still catches an incomplete sub-sample
					this.logger.error("Unexpected exception during simple random sampling!", e);
				}
				if (simpleRandomSampling.getChosenIndices().size() != sampleSizeForStrati[i]) {
					throw new IllegalStateException("Number of samples drawn for stratum " + i + " is " + simpleRandomSampling.getChosenIndices().size() + ", but it should be " + sampleSizeForStrati[i]);
				}
				sampleDeriver.addIndices(stratumBuilder.getIndicesOfNewInstancesInOriginalDataset(simpleRandomSampling.getChosenIndices())); // this is MUCH faster than adding the instances
			}
		}
		if (sampleDeriver.currentSizeOfTarget() != this.sampleSize) {
			throw new IllegalStateException("The deriver says that the target has " + sampleDeriver.currentSizeOfTarget() + " elements, but it should have been configured for " + this.sampleSize);
		}
		this.checkAndConductTermination();
		this.logger.info("Strati sub-samples completed, building the final sample and shuffling it.");
		this.sample = sampleDeriver.build();
		if (this.sample.size() != numSamplesTotal) {
			throw new IllegalStateException("The sample deriver has produced a sample with " + this.sample.size() + " elements while it should have " + numSamplesTotal);
		}
		Collections.shuffle(this.sample, this.random); // up to here, instances have been ordered by their class. We now mix instances of the classes again.
		this.logger.info("Overall stratified shuffled sample completed.");
	}

	@Override
	public void setLoggerName(final String loggername) {
		this.logger = LoggerFactory.getLogger(loggername);
		if (this.stratificationTechnique instanceof ILoggingCustomizable) {
			((ILoggingCustomizable) this.stratificationTechnique).setLoggerName(loggername + ".stratifier");
		}
	}

	@Override
	public String getLoggerName() {
		return this.logger.getName();
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/supervised/package-info.java
package ai.libs.jaicore.ml.core.filter.supervised;
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/filter/unsupervised/package-info.java
package ai.libs.jaicore.ml.core.filter.unsupervised;
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/learner/ASupervisedLearner.java
package ai.libs.jaicore.ml.core.learner; import java.lang.reflect.Array; import java.util.Arrays; import java.util.HashMap; import java.util.Map; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance; import org.api4.java.ai.ml.core.evaluation.IPrediction; import org.api4.java.ai.ml.core.evaluation.IPredictionBatch; import org.api4.java.ai.ml.core.exception.DatasetCreationException; import org.api4.java.ai.ml.core.exception.LearnerConfigurationFailedException; import org.api4.java.ai.ml.core.exception.PredictionException; import org.api4.java.ai.ml.core.exception.TrainingException; import org.api4.java.ai.ml.core.learner.ISupervisedLearner; public abstract class ASupervisedLearner<I extends ILabeledInstance, D extends ILabeledDataset<? extends I>, P extends IPrediction, B extends IPredictionBatch> implements ISupervisedLearner<I, D> { private Map<String, Object> config; protected ASupervisedLearner(final Map<String, Object> config) { this.config = config; } protected ASupervisedLearner() { this.config = new HashMap<>(); } @Override public P fitAndPredict(final D dTrain, final I xTest) throws TrainingException, PredictionException, InterruptedException { I[] xTestAsArray = (I[]) Array.newInstance(xTest.getClass(), 1); xTestAsArray[0] = xTest; return (P) this.fitAndPredict(dTrain, xTestAsArray).get(0); } @Override public B fitAndPredict(final D dTrain, final I[] xTest) throws TrainingException, PredictionException, InterruptedException { try { ILabeledDataset dTest = dTrain.createEmptyCopy(); Arrays.stream(xTest).forEach(dTest::add); return this.fitAndPredict(dTrain, (D) dTest); } catch (DatasetCreationException e) { throw new PredictionException("Could not create test dataset from array of instances"); } } @Override public B fitAndPredict(final D dTrain, final D dTest) throws TrainingException, PredictionException, InterruptedException { this.fit(dTrain); return this.predict(dTest); } 
@SuppressWarnings("unchecked") @Override public B predict(final D dTest) throws PredictionException, InterruptedException { Class<I> clazz = (Class<I>) dTest.iterator().next().getClass(); I[] instancesAsArray = (I[]) Array.newInstance(clazz, dTest.size()); for (int i = 0; i < dTest.size(); i++) { instancesAsArray[i] = dTest.get(i); } return this.predict(instancesAsArray); } @Override public abstract P predict(I xTest) throws PredictionException, InterruptedException; @Override public abstract B predict(I[] dTest) throws PredictionException, InterruptedException; @Override public void setConfig(final Map<String, Object> config) throws LearnerConfigurationFailedException, InterruptedException { this.config = config; } @Override public Map<String, Object> getConfig() { return this.config; } }
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/core/learner/package-info.java
/**
 * Base classes for supervised learners.
 *
 * @author mwever
 */
package ai.libs.jaicore.ml.core.learner;
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/package-info.java
/**
 * Function prediction (e.g. learning curve extrapolation) components.
 *
 * @author mwever
 */
package ai.libs.jaicore.ml.functionprediction;
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation/ConfigurationLearningCurveExtrapolator.java
package ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset; import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance; import org.api4.java.ai.ml.core.exception.DatasetCreationException; import org.api4.java.ai.ml.core.learner.ISupervisedLearner; import ai.libs.jaicore.ml.core.filter.sampling.inmemory.ASamplingAlgorithm; import ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories.interfaces.ISamplingAlgorithmFactory; import ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation.lcnet.LCNetExtrapolationMethod; /** * This class is a subclass of LearningCurveExtrapolator which deals * with the slightly different setup that is required by the LCNet * of pybnn * * @author noni4 */ public class ConfigurationLearningCurveExtrapolator extends LearningCurveExtrapolator { public ConfigurationLearningCurveExtrapolator(final ISupervisedLearner<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> learner, final ILabeledDataset<?> dataset, final double trainsplit, final int[] anchorpoints, final ISamplingAlgorithmFactory<ILabeledDataset<?>, ASamplingAlgorithm<ILabeledDataset<?>>> samplingAlgorithmFactory, final long seed, final String identifier, final double[] configurations) throws DatasetCreationException, InterruptedException { super(null, learner, dataset, trainsplit, anchorpoints, samplingAlgorithmFactory, seed); this.extrapolationMethod = new LCNetExtrapolationMethod(identifier); ((LCNetExtrapolationMethod) this.extrapolationMethod).setConfigurations(configurations); } }
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation/InvalidAnchorPointsException.java
package ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation; /** * Exception that is thrown, when the anchorpoints generated for learning curve * extrapolation are not suitable. * * @author Lukas Brandt */ public class InvalidAnchorPointsException extends Exception { private static final long serialVersionUID = 779132065752019479L; }
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation/LearningCurveExtrapolatedEvent.java
package ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation; import org.api4.java.common.event.IEvent; public class LearningCurveExtrapolatedEvent implements IEvent { private final LearningCurveExtrapolator extrapolator; public LearningCurveExtrapolatedEvent(final LearningCurveExtrapolator extrapolator) { super(); this.extrapolator = extrapolator; } public LearningCurveExtrapolator getExtrapolator() { return this.extrapolator; } @Override public long getTimestamp() { throw new UnsupportedOperationException(); } }
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation/LearningCurveExtrapolationMethod.java
package ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation; import java.util.concurrent.ExecutionException; import org.api4.java.ai.ml.core.evaluation.learningcurve.ILearningCurve; /** * Functional interface for extrapolating a learning curve from anchorpoints. * * @author Lukas Brandt */ @FunctionalInterface public interface LearningCurveExtrapolationMethod { public ILearningCurve extrapolateLearningCurveFromAnchorPoints(int[] xValues, double[] yValues, int dataSetSize) throws InvalidAnchorPointsException, InterruptedException, ExecutionException; }
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation/LearningCurveExtrapolator.java
package ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeoutException;

import org.api4.java.ai.ml.classification.singlelabel.evaluation.ISingleLabelClassification;
import org.api4.java.ai.ml.core.dataset.splitter.SplitFailedException;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance;
import org.api4.java.ai.ml.core.evaluation.execution.ILearnerRunReport;
import org.api4.java.ai.ml.core.evaluation.learningcurve.ILearningCurve;
import org.api4.java.ai.ml.core.evaluation.supervised.loss.IDeterministicPredictionPerformanceMeasure;
import org.api4.java.ai.ml.core.exception.DatasetCreationException;
import org.api4.java.ai.ml.core.learner.ISupervisedLearner;
import org.api4.java.algorithm.exceptions.AlgorithmException;
import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException;
import org.api4.java.common.control.ILoggingCustomizable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ai.libs.jaicore.ml.classification.loss.dataset.EClassificationPerformanceMeasure;
import ai.libs.jaicore.ml.core.evaluation.evaluator.SupervisedLearnerExecutor;
import ai.libs.jaicore.ml.core.filter.SplitterUtil;
import ai.libs.jaicore.ml.core.filter.sampling.inmemory.ASamplingAlgorithm;
import ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories.interfaces.IRerunnableSamplingAlgorithmFactory;
import ai.libs.jaicore.ml.core.filter.sampling.inmemory.factories.interfaces.ISamplingAlgorithmFactory;

/**
 * Driver for learning curve extrapolation with a set of anchor points. For each
 * anchor point a subsample of the training split is drawn and the learner is
 * trained on it. Based on the observed points (subsample size, learner error)
 * a pluggable {@link LearningCurveExtrapolationMethod} fits the curve.
 *
 * @author Lukas Brandt
 * @author Felix Mohr
 */
public class LearningCurveExtrapolator implements ILoggingCustomizable {

	private Logger logger = LoggerFactory.getLogger(LearningCurveExtrapolator.class);

	// the learner whose learning curve shall be extrapolated
	protected ISupervisedLearner<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> learner;
	// full dataset, and the train/test split derived from it in createSplit
	protected ILabeledDataset<? extends ILabeledInstance> dataset;
	protected ILabeledDataset<? extends ILabeledInstance> train;
	protected ILabeledDataset<? extends ILabeledInstance> test;
	// factory for the subsampling algorithm used to shrink the train split to each anchor point size
	protected ISamplingAlgorithmFactory<ILabeledDataset<?>, ? extends ASamplingAlgorithm<ILabeledDataset<?>>> samplingAlgorithmFactory;
	// last created sampler; fed back into rerunnable factories so consecutive samples can build on each other
	protected ASamplingAlgorithm<ILabeledDataset<? extends ILabeledInstance>> samplingAlgorithm;
	protected Random random;
	protected LearningCurveExtrapolationMethod extrapolationMethod;
	private final int[] anchorPoints;
	// observed loss values per anchor point (filled by extrapolateLearningCurve)
	private final double[] yValues;
	// training times in ms per anchor point (filled by extrapolateLearningCurve)
	private final int[] trainingTimes;

	/**
	 * Create a learning curve extrapolator with a subsampling factory.
	 *
	 * @param extrapolationMethod Method for extrapolating a learning curve from anchorpoints.
	 * @param learner Learning model to predict the learning curve of.
	 * @param dataset Dataset to evaluate the learner on.
	 * @param trainsplit Portion of the dataset, which shall be used to sample from for training.
	 * @param anchorPoints Subsample sizes at which the learner's performance is measured.
	 * @param samplingAlgorithmFactory Subsampling algorithm factory to create a configured subsampler with.
	 * @param seed Random seed.
	 * @throws DatasetCreationException
	 * @throws InterruptedException
	 */
	public LearningCurveExtrapolator(final LearningCurveExtrapolationMethod extrapolationMethod, final ISupervisedLearner<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> learner, final ILabeledDataset<?> dataset,
			final double trainsplit, final int[] anchorPoints, final ISamplingAlgorithmFactory<ILabeledDataset<?>, ? extends ASamplingAlgorithm<ILabeledDataset<?>>> samplingAlgorithmFactory, final long seed)
			throws DatasetCreationException, InterruptedException {
		this.extrapolationMethod = extrapolationMethod;
		this.learner = learner;
		this.dataset = dataset;
		this.anchorPoints = anchorPoints;
		this.samplingAlgorithmFactory = samplingAlgorithmFactory;
		this.samplingAlgorithm = null;
		this.random = new Random(seed);
		this.createSplit(trainsplit, seed);
		this.yValues = new double[this.anchorPoints.length];
		this.trainingTimes = new int[this.anchorPoints.length];
	}

	/**
	 * Measure the learner accuracy at the given anchorpoints and extrapolate a
	 * learning curve based on the results.
	 *
	 * @return The extrapolated learning curve.
	 *
	 * @throws InvalidAnchorPointsException The anchorpoints (amount, values, ...)
	 *                                      are not suitable for the given learning
	 *                                      curve extrapolation method.
	 * @throws AlgorithmException An error occurred during the creation of the
	 *                            specified anchorpoints.
	 * @throws InterruptedException
	 */
	@SuppressWarnings("unchecked")
	public ILearningCurve extrapolateLearningCurve() throws InvalidAnchorPointsException, AlgorithmException, InterruptedException {
		try {
			ILabeledDataset<? extends ILabeledInstance> testInstances = this.test;

			// Create subsamples at the anchorpoints and measure the accuracy there.
			SupervisedLearnerExecutor learnerExecutor = new SupervisedLearnerExecutor();
			// NOTE: despite variable names mentioning "accuracy", the metric used is the error rate.
			IDeterministicPredictionPerformanceMeasure<Integer, ISingleLabelClassification> metric = EClassificationPerformanceMeasure.ERRORRATE;
			for (int i = 0; i < this.anchorPoints.length; i++) {

				// If it is a rerunnable factory, set the previous run so samples can be reused/extended.
				if (this.samplingAlgorithmFactory instanceof IRerunnableSamplingAlgorithmFactory && this.samplingAlgorithm != null) {
					((IRerunnableSamplingAlgorithmFactory<ILabeledDataset<?>, ASamplingAlgorithm<ILabeledDataset<?>>>) this.samplingAlgorithmFactory).setPreviousRun(this.samplingAlgorithm);
				}
				this.samplingAlgorithm = this.samplingAlgorithmFactory.getAlgorithm(this.anchorPoints[i], this.train, this.random);
				ILabeledDataset<? extends ILabeledInstance> subsampledDataset = this.samplingAlgorithm.call();

				// Train classifier on subsample.
				this.logger.debug("Running classifier with {} data points.", this.anchorPoints[i]);
				ILearnerRunReport report = learnerExecutor.execute(this.learner, subsampledDataset, testInstances);
				this.trainingTimes[i] = (int) (report.getTrainEndTime() - report.getTrainStartTime());

				// Measure accuracy of the trained learner on test split.
				this.yValues[i] = metric.loss(report.getPredictionDiffList().getCastedView(Integer.class, ISingleLabelClassification.class));
				this.logger.debug("Training finished. Observed learning curve value (accuracy) of {}.", this.yValues[i]);
			}
			if (this.logger.isInfoEnabled()) {
				this.logger.info("Computed accuracies of {} for anchor points {}. Now extrapolating a curve from these observations.", Arrays.toString(this.yValues), Arrays.toString(this.anchorPoints));
			}
			return this.extrapolationMethod.extrapolateLearningCurveFromAnchorPoints(this.anchorPoints, this.yValues, this.dataset.size());
		} catch (AlgorithmExecutionCanceledException | TimeoutException | AlgorithmException e) {
			throw new AlgorithmException("Error during creation of the subsamples for the anchorpoints", e);
		} catch (ExecutionException e) {
			throw new AlgorithmException("Error during learning curve extrapolation", e);
		} catch (InvalidAnchorPointsException | InterruptedException e) {
			// propagate unchanged; callers distinguish these from generic algorithm failures
			throw e;
		} catch (Exception e) {
			throw new AlgorithmException("Error during training/testing the classifier", e);
		}
	}

	/**
	 * Creates a label-stratified train/test split of {@link #dataset} and shuffles
	 * both folds with a seed-derived Random for reproducibility.
	 *
	 * @param trainsplit Portion of the data that goes into the train fold.
	 * @param seed Seed for both the split and the shuffling.
	 */
	private void createSplit(final double trainsplit, final long seed) throws DatasetCreationException, InterruptedException {
		long start = System.currentTimeMillis();
		this.logger.debug("Creating split with training portion {} and seed {}", trainsplit, seed);
		Random r = new Random(seed);
		try {
			List<ILabeledDataset<?>> folds = SplitterUtil.getLabelStratifiedTrainTestSplit(this.dataset, seed, trainsplit);
			this.train = folds.get(0);
			this.test = folds.get(1);

			// Shuffle train and test
			this.logger.debug("Shuffling train and test data");
			Collections.shuffle(this.train, r);
			Collections.shuffle(this.test, r);
			this.logger.debug("Finished split creation after {}ms", System.currentTimeMillis() - start);
		} catch (SplitFailedException e) {
			throw new DatasetCreationException(e);
		}
	}

	public ISupervisedLearner<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>> getLearner() {
		return this.learner;
	}

	public ILabeledDataset<?> getDataset() {
		return this.dataset;
	}

	public LearningCurveExtrapolationMethod getExtrapolationMethod() {
		return this.extrapolationMethod;
	}

	public int[] getAnchorPoints() {
		return this.anchorPoints;
	}

	public double[] getyValues() {
		return this.yValues;
	}

	public int[] getTrainingTimes() {
		return this.trainingTimes;
	}

	@Override
	public String getLoggerName() {
		return this.logger.getName();
	}

	@Override
	public void setLoggerName(final String name) {
		this.logger = LoggerFactory.getLogger(name);
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation/client/ExtrapolationRequest.java
package ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation.client; import java.util.List; /** * This class describes the request that is sent to an Extrapolation Service. It * contains the x- and y-values of the anchor points. * * @author Felix Weiland * */ public class ExtrapolationRequest { private List<Integer> xValues; private List<Double> yValues; private Integer numSamples; public List<Integer> getxValues() { return this.xValues; } public void setxValues(final List<Integer> xValues) { this.xValues = xValues; } public List<Double> getyValues() { return this.yValues; } public void setyValues(final List<Double> yValues) { this.yValues = yValues; } public Integer getNumSamples() { return this.numSamples; } public void setNumSamples(final Integer numSamples) { this.numSamples = numSamples; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((this.numSamples == null) ? 0 : this.numSamples.hashCode()); result = prime * result + ((this.xValues == null) ? 0 : this.xValues.hashCode()); result = prime * result + ((this.yValues == null) ? 
0 : this.yValues.hashCode()); return result; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (this.getClass() != obj.getClass()) { return false; } ExtrapolationRequest other = (ExtrapolationRequest) obj; if (this.numSamples == null) { if (other.numSamples != null) { return false; } } else if (!this.numSamples.equals(other.numSamples)) { return false; } if (this.xValues == null) { if (other.xValues != null) { return false; } } else if (!this.xValues.equals(other.xValues)) { return false; } if (this.yValues == null) { if (other.yValues != null) { return false; } } else if (!this.yValues.equals(other.yValues)) { return false; } return true; } @Override public String toString() { return "ExtrapolationRequest [xValues=" + this.xValues + ", yValues=" + this.yValues + ", numSamples=" + this.numSamples + "]"; } }
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation/client/ExtrapolationServiceClient.java
package ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation.client; import java.net.URI; import java.util.ArrayList; import java.util.List; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import javax.ws.rs.client.Client; import javax.ws.rs.client.ClientBuilder; import javax.ws.rs.client.Entity; import javax.ws.rs.client.WebTarget; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation.InvalidAnchorPointsException; /** * This class describes the client that is responsible for the communication * with an Extrapolation Service. The client accepts x- and y-values of anchor * points, creates a request and sends this request to an Extrapolation Service. * The configuration which was computed by the Extrapolation Service is returned * after the response has been received. * * @author Felix Weiland * @author Lukas Brandt * */ public class ExtrapolationServiceClient<C> { private String serviceUrl; private Class<C> configClass; public ExtrapolationServiceClient(final String serviceUrl, final Class<C> configClass) { this.serviceUrl = serviceUrl; this.configClass = configClass; } public C getConfigForAnchorPoints(final int[] xValuesArr, final double[] yValuesArr) throws InvalidAnchorPointsException, InterruptedException, ExecutionException { /* Create request */ ExtrapolationRequest request = new ExtrapolationRequest(); List<Integer> xValues = new ArrayList<>(); for (int x : xValuesArr) { xValues.add(x); } List<Double> yValues = new ArrayList<>(); for (double y : yValuesArr) { yValues.add(y); } request.setxValues(xValues); request.setyValues(yValues); /* Create service client */ Client client = ClientBuilder.newClient(); WebTarget target = null; try { target = client.target(new URI(this.serviceUrl)); } catch (Exception e) { throw new IllegalStateException("No WebTarget!", e); } /* Send request and wait for response */ 
Future<Response> future = target.request(MediaType.APPLICATION_JSON).async().post(Entity.entity(request, MediaType.APPLICATION_JSON)); Response response; response = future.get(); if (response.getStatus() == 500 && response.readEntity(String.class).equals("Invalid anchorpoints")) { throw new InvalidAnchorPointsException(); } return response.readEntity(this.configClass); } }
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation/ipl/InversePowerLawConfiguration.java
package ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation.ipl; /** * This class encapsulates the three parameters that are required in order to * create a Inverse Power Law function. * * @author Lukas Brandt * */ public class InversePowerLawConfiguration { // Inverse Power Law parameters private double a; private double b; private double c; public double getA() { return this.a; } public void setA(double a) { this.a = a; } public double getB() { return this.b; } public void setB(double b) { this.b = b; } public double getC() { return this.c; } public void setC(double c) { this.c = c; } @Override public String toString() { return "InversePowerLawConfiguration [a=" + this.a + ", b=" + this.b + ", c=" + this.c + "]"; } }
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation/ipl/InversePowerLawExtrapolationMethod.java
package ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation.ipl; import java.util.concurrent.ExecutionException; import org.api4.java.ai.ml.core.evaluation.learningcurve.ILearningCurve; import ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation.InvalidAnchorPointsException; import ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation.LearningCurveExtrapolationMethod; import ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation.client.ExtrapolationServiceClient; /** * This class describes a method for learning curve extrapolation which * generates an Inverse Power Law function. The parameter of this function are * predicted in an external component that is called via HTTP. * * @author Lukas Brandt * */ public class InversePowerLawExtrapolationMethod implements LearningCurveExtrapolationMethod { // We assume the service to be running locally private static final String ENDPOINT = "/jaicore/web/api/v1/ipl/modelparams"; private static final String DEFAULT_HOST = "localhost"; private static final String DEFAULT_PORT = "8081"; private String serviceUrl; public InversePowerLawExtrapolationMethod() { this.serviceUrl = "http://" + DEFAULT_HOST + ":" + DEFAULT_PORT + ENDPOINT; } public InversePowerLawExtrapolationMethod(String serviceHost, String port) { this.serviceUrl = "http://" + serviceHost + ":" + port + ENDPOINT; } @Override public ILearningCurve extrapolateLearningCurveFromAnchorPoints(int[] xValues, double[] yValues, int dataSetSize) throws InvalidAnchorPointsException, InterruptedException, ExecutionException { // Request model parameters to create learning curve ExtrapolationServiceClient<InversePowerLawConfiguration> client = new ExtrapolationServiceClient<>(serviceUrl, InversePowerLawConfiguration.class); InversePowerLawConfiguration configuration = client.getConfigForAnchorPoints(xValues, yValues); configuration.setA(Math.max(0.00000000001, Math.min(configuration.getA(), 
0.99999999999))); configuration.setC(Math.max(-0.99999999999, Math.min(configuration.getC(), -0.00000000001))); return new InversePowerLawLearningCurve(configuration); } }
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation/ipl/InversePowerLawLearningCurve.java
package ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation.ipl; import java.math.BigDecimal; import org.apache.commons.math3.analysis.solvers.BrentSolver; import org.apache.commons.math3.analysis.solvers.UnivariateSolver; import org.apache.commons.math3.exception.NoBracketingException; import org.api4.java.ai.ml.core.evaluation.learningcurve.IAnalyticalLearningCurve; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Representation of a learning curve with the Inverse Power Law function, which has three parameters named a, b and c. The function is f(x) = (1-a) - b * x^c. O * * @author Lukas Brandt * */ public class InversePowerLawLearningCurve implements IAnalyticalLearningCurve { private Logger logger = LoggerFactory.getLogger(InversePowerLawLearningCurve.class); private double a; private double b; private double c; public InversePowerLawLearningCurve(final double a, final double b, final double c) { if (!(a > 0 && a < 1)) { throw new IllegalArgumentException("Parameter a has to be in (0,1)"); } if (!(c > -1 && c < 0)) { throw new IllegalArgumentException("Parameter c has to be in (-1,0)"); } this.a = a; this.b = b; this.c = c; } public InversePowerLawLearningCurve(final InversePowerLawConfiguration configuration) { if (!(configuration.getA() > 0 && configuration.getA() < 1)) { throw new IllegalArgumentException("Parameter a has to be in (0,1)"); } if (!(configuration.getC() > -1 && configuration.getC() < 0)) { throw new IllegalArgumentException("Parameter c has to be in (-1,0)"); } this.a = configuration.getA(); this.b = configuration.getB(); this.c = configuration.getC(); } @Override public double getSaturationPoint(final double epsilon) { if (epsilon <= 0) { throw new IllegalArgumentException("Parameter epsilon has to be >= 0"); } double n = this.c - 1.0d; double base = -(epsilon / (this.b * this.c)); return Math.pow(Math.E, Math.log(base) / n); } @Override public double getCurveValue(final double x) { return (1.0d - this.a) - 
this.b * Math.pow(x, this.c); } @Override public double getDerivativeCurveValue(final double x) { return (-this.b) * this.c * Math.pow(x, this.c - 1.0d); } @Override public String toString() { return "(1 - " + BigDecimal.valueOf(this.a).toPlainString() + ") - " + BigDecimal.valueOf(this.b).toPlainString() + " * x ^ " + BigDecimal.valueOf(this.c).toPlainString(); } @Override public double getConvergenceValue() { UnivariateSolver solver = new BrentSolver(0, 1.0d); double convergencePoint = -1; int upperIntervalBound = 10000; int retriesLeft = 8; while (retriesLeft > 0 && convergencePoint == -1) { try { convergencePoint = solver.solve(1000, x -> this.getDerivativeCurveValue(x) - 0.0000001, 1, upperIntervalBound); } catch (NoBracketingException e) { this.logger.warn(String.format("No solution could be found in interval [1,%d]", upperIntervalBound)); retriesLeft--; upperIntervalBound *= 2; } } if (convergencePoint == -1) { throw new IllegalStateException(String.format("No solution could be found in interval [1,%d]", upperIntervalBound)); } return this.getCurveValue(convergencePoint); } public double getA() { return this.a; } public double getB() { return this.b; } public double getC() { return this.c; } }
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation/lc/LinearCombinationConstants.java
package ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation.lc;

/**
 * Constant names (function identifiers and parameter keys) used by the linear
 * combination learning curve.
 *
 * @author Felix Weiland
 *
 */
public class LinearCombinationConstants {

	private LinearCombinationConstants() {
		// utility class; not meant to be instantiated
	}

	// Function names
	public static final String VAPOR_PRESSURE = "vapor_pressure";
	public static final String POW_3 = "pow_3";
	public static final String LOG_LOG_LINEAR = "log_log_linear";
	public static final String HILL_3 = "hill_3";
	public static final String LOG_POWER = "log_power";
	public static final String POW_4 = "pow_4";
	public static final String MMF = "mmf";
	public static final String EXP_4 = "exp_4";
	public static final String JANOSCHEK = "janoschek";
	public static final String WEIBULL = "weibull";
	public static final String ILOG_2 = "ilog_2";

	// Parameter names
	public static final String A = "a";
	public static final String B = "b";
	public static final String C = "c";
	public static final String E = "e";
	public static final String Y = "y";
	public static final String ALPHA = "alpha";
	public static final String BETA = "beta";
	public static final String DELTA = "delta";
	public static final String ETA = "eta";
	public static final String KAPPA = "kappa";
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation/lc/LinearCombinationExtrapolationMethod.java
package ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation.lc; import java.util.concurrent.ExecutionException; import org.api4.java.ai.ml.core.evaluation.learningcurve.ILearningCurve; import ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation.InvalidAnchorPointsException; import ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation.LearningCurveExtrapolationMethod; import ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation.client.ExtrapolationServiceClient; /** * This class describes a method for learning curve extrapolation which * generates a linear combination of suitable functions. The parameter of these * functions as well as the weighting of the are sampled via MCMC. The actual * MCMC sampling is done in an external component that is called via HTTP. * * @author Felix Weiland * */ public class LinearCombinationExtrapolationMethod implements LearningCurveExtrapolationMethod { // We assume the service to be running locally private static final String ENDPOINT = "/jaicore/web/api/v1/mcmc/modelparams"; private static final String DEFAULT_HOST = "localhost"; private static final String DEFAULT_PORT = "8080"; private String serviceUrl; public LinearCombinationExtrapolationMethod() { this.serviceUrl = "http://" + DEFAULT_HOST + ":" + DEFAULT_PORT + ENDPOINT; } public LinearCombinationExtrapolationMethod(String serviceHost, String port) { this.serviceUrl = "http://" + serviceHost + ":" + port + ENDPOINT; } @Override public ILearningCurve extrapolateLearningCurveFromAnchorPoints(int[] xValues, double[] yValues, int dataSetSize) throws InvalidAnchorPointsException, InterruptedException, ExecutionException { // Request model parameters to create learning curve ExtrapolationServiceClient<LinearCombinationLearningCurveConfiguration> client = new ExtrapolationServiceClient<>( serviceUrl, LinearCombinationLearningCurveConfiguration.class); LinearCombinationLearningCurveConfiguration 
configuration = client.getConfigForAnchorPoints(xValues, yValues); return new LinearCombinationLearningCurve(configuration, dataSetSize); } }
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation/lc/LinearCombinationFunction.java
package ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation.lc; import java.util.List; import org.apache.commons.math3.analysis.UnivariateFunction; /** * This is a basic class that describes a function which is a weighted * combination of individual functions. * * @author Felix Weiland * */ public class LinearCombinationFunction implements UnivariateFunction { /** Functions the linear combination consists of */ private List<UnivariateFunction> functions; /** * Weights of the individual functions. For reasonable results, the sum of the * weights should be 1. */ private List<Double> weights; /** Offset, which is added to the value of the linear combination */ private double offset; public LinearCombinationFunction(List<UnivariateFunction> functions, List<Double> weights) { super(); this.functions = functions; this.weights = weights; } public List<UnivariateFunction> getFunctions() { return functions; } public void setFunctions(List<UnivariateFunction> functions) { this.functions = functions; } public List<Double> getWeights() { return weights; } public void setWeights(List<Double> weights) { this.weights = weights; } public double getOffset() { return offset; } public void setOffset(double offset) { this.offset = offset; } @Override public double value(double x) { double value = 0; for (int i = 0; i < functions.size(); i++) { value += functions.get(i).value(x) * weights.get(i); } return value + offset; } }
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation/lc/LinearCombinationLearningCurve.java
package ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation.lc;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

import org.apache.commons.math3.analysis.UnivariateFunction;
import org.apache.commons.math3.analysis.solvers.BrentSolver;
import org.apache.commons.math3.analysis.solvers.UnivariateSolver;
import org.apache.commons.math3.exception.NoBracketingException;
import org.api4.java.ai.ml.core.evaluation.learningcurve.IAnalyticalLearningCurve;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * The LinearCombinationLearningCurve consists of the actual linear combination
 * function that describes the learning curve, as well as the derivative of this
 * function. The derivative is used in order to calculate the saturation point.
 *
 * @author Felix Weiland
 */
public class LinearCombinationLearningCurve implements IAnalyticalLearningCurve {

    private static final Logger LOG = LoggerFactory.getLogger(LinearCombinationLearningCurve.class);

    /**
     * Number of times the size of the interval in which a root is searched is
     * doubled before giving up. (Renamed from the misspelled "RETIRES".)
     */
    private static final int ROOT_COMPUTATION_RETRIES = 8;

    /** Slope at which we assume to have reached the saturation point. */
    private static final double SLOPE_SATURATION_POINT = 0.0001;

    /** Error tolerance for root computation when calculating the convergence value. */
    private static final double TOLERANCE_CONVERGENCE_VALUE = 1.0;

    /**
     * Slope at which we assume that there is no significant change in the curve
     * value anymore and the convergence value is reached.
     */
    private static final double SLOPE_CONVERGENCE_VALUE = 0.0000001;

    /** The (extrapolated) learning curve function. */
    private final LinearCombinationFunction learningCurve;

    /** The derivative of the learning curve. */
    private final LinearCombinationFunction derivative;

    /** Size of the data set this learning curve was produced on. */
    private final int dataSetSize;

    /**
     * Builds the combined learning curve (and its derivative) from the given
     * configuration. All parameter sets contribute with equal weight 1/n.
     *
     * @param configuration parameterizations of the individual linear combination functions
     * @param dataSetSize size of the data set the curve was produced on
     */
    public LinearCombinationLearningCurve(final LinearCombinationLearningCurveConfiguration configuration, final int dataSetSize) {
        List<UnivariateFunction> learningCurves = new ArrayList<>();
        List<UnivariateFunction> derivatives = new ArrayList<>();
        for (LinearCombinationParameterSet parameterSet : configuration.getParameterSets()) {
            learningCurves.add(this.generateLearningCurve(parameterSet));
            derivatives.add(this.generateDerivative(parameterSet));
        }
        int n = configuration.getParameterSets().size();
        List<Double> weights = new ArrayList<>();
        for (int i = 0; i < n; i++) {
            weights.add(1.0 / n);
        }
        this.learningCurve = new LinearCombinationFunction(learningCurves, weights);
        this.derivative = new LinearCombinationFunction(derivatives, weights);
        this.dataSetSize = dataSetSize;
    }

    /**
     * If the parameter set contains a parameterization for {@code key}, builds the
     * corresponding function via {@code factory} and registers it together with its
     * weight. Extracted to remove the 22-fold duplication of the original branches.
     */
    private static void addIfPresent(final LinearCombinationParameterSet parameterSet, final String key, final List<UnivariateFunction> functions, final List<Double> weights,
            final Function<Map<String, Double>, UnivariateFunction> factory) {
        if (parameterSet.getParameters().containsKey(key)) {
            functions.add(factory.apply(parameterSet.getParameters().get(key)));
            weights.add(parameterSet.getWeights().get(key));
        }
    }

    /**
     * Builds the weighted combination of all parameterized basis functions of the
     * learning curve itself. Parameters are read at evaluation time, as before.
     */
    private LinearCombinationFunction generateLearningCurve(final LinearCombinationParameterSet parameterSet) {
        List<UnivariateFunction> functions = new ArrayList<>();
        List<Double> weights = new ArrayList<>();
        addIfPresent(parameterSet, LinearCombinationConstants.VAPOR_PRESSURE, functions, weights, p -> x -> {
            double a = p.get(LinearCombinationConstants.A);
            double b = p.get(LinearCombinationConstants.B);
            double c = p.get(LinearCombinationConstants.C);
            return Math.exp(a + (b / x) + c * Math.log(x));
        });
        addIfPresent(parameterSet, LinearCombinationConstants.POW_3, functions, weights, p -> x -> {
            double alpha = p.get(LinearCombinationConstants.ALPHA);
            double a = p.get(LinearCombinationConstants.A);
            double c = p.get(LinearCombinationConstants.C);
            return c - a * Math.pow(x, -1 * alpha);
        });
        addIfPresent(parameterSet, LinearCombinationConstants.LOG_LOG_LINEAR, functions, weights, p -> x -> {
            double a = p.get(LinearCombinationConstants.A);
            double b = p.get(LinearCombinationConstants.B);
            return Math.log(a * Math.log(x) + b);
        });
        addIfPresent(parameterSet, LinearCombinationConstants.HILL_3, functions, weights, p -> x -> {
            double y = p.get(LinearCombinationConstants.Y);
            double eta = p.get(LinearCombinationConstants.ETA);
            double kappa = p.get(LinearCombinationConstants.KAPPA);
            return (y * Math.pow(x, eta)) / (Math.pow(kappa, eta) + Math.pow(x, eta));
        });
        addIfPresent(parameterSet, LinearCombinationConstants.LOG_POWER, functions, weights, p -> x -> {
            double a = p.get(LinearCombinationConstants.A);
            double b = p.get(LinearCombinationConstants.B);
            double c = p.get(LinearCombinationConstants.C);
            return a / (1 + Math.pow(x / Math.exp(b), c));
        });
        addIfPresent(parameterSet, LinearCombinationConstants.POW_4, functions, weights, p -> x -> {
            double a = p.get(LinearCombinationConstants.A);
            double b = p.get(LinearCombinationConstants.B);
            double c = p.get(LinearCombinationConstants.C);
            double alpha = p.get(LinearCombinationConstants.ALPHA);
            return c - Math.pow(a * x + b, -alpha);
        });
        addIfPresent(parameterSet, LinearCombinationConstants.MMF, functions, weights, p -> x -> {
            double alpha = p.get(LinearCombinationConstants.ALPHA);
            double beta = p.get(LinearCombinationConstants.BETA);
            double delta = p.get(LinearCombinationConstants.DELTA);
            double kappa = p.get(LinearCombinationConstants.KAPPA);
            return alpha - ((alpha - beta) / (1 + Math.pow(kappa * x, delta)));
        });
        addIfPresent(parameterSet, LinearCombinationConstants.EXP_4, functions, weights, p -> x -> {
            double a = p.get(LinearCombinationConstants.A);
            double b = p.get(LinearCombinationConstants.B);
            double c = p.get(LinearCombinationConstants.C);
            double alpha = p.get(LinearCombinationConstants.ALPHA);
            return c - Math.exp(-a * Math.pow(x, alpha) + b);
        });
        addIfPresent(parameterSet, LinearCombinationConstants.JANOSCHEK, functions, weights, p -> x -> {
            double alpha = p.get(LinearCombinationConstants.ALPHA);
            double beta = p.get(LinearCombinationConstants.BETA);
            double delta = p.get(LinearCombinationConstants.DELTA);
            double kappa = p.get(LinearCombinationConstants.KAPPA);
            return alpha - (alpha - beta) * Math.exp(-kappa * Math.pow(x, delta));
        });
        addIfPresent(parameterSet, LinearCombinationConstants.WEIBULL, functions, weights, p -> x -> {
            double alpha = p.get(LinearCombinationConstants.ALPHA);
            double beta = p.get(LinearCombinationConstants.BETA);
            double delta = p.get(LinearCombinationConstants.DELTA);
            double kappa = p.get(LinearCombinationConstants.KAPPA);
            return alpha - (alpha - beta) * Math.exp(-1 * Math.pow(kappa * x, delta));
        });
        addIfPresent(parameterSet, LinearCombinationConstants.ILOG_2, functions, weights, p -> x -> {
            double a = p.get(LinearCombinationConstants.A);
            double c = p.get(LinearCombinationConstants.C);
            return c - (a / Math.log(x));
        });
        return new LinearCombinationFunction(functions, weights);
    }

    /**
     * Builds the weighted combination of the analytical derivatives of the basis
     * functions, mirroring {@link #generateLearningCurve(LinearCombinationParameterSet)}.
     */
    private LinearCombinationFunction generateDerivative(final LinearCombinationParameterSet parameterSet) {
        List<UnivariateFunction> functions = new ArrayList<>();
        List<Double> weights = new ArrayList<>();
        addIfPresent(parameterSet, LinearCombinationConstants.VAPOR_PRESSURE, functions, weights, p -> x -> {
            double a = p.get(LinearCombinationConstants.A);
            double b = p.get(LinearCombinationConstants.B);
            double c = p.get(LinearCombinationConstants.C);
            return Math.pow(x, c - 2) * Math.exp(a + b / x) * (c * x - b);
        });
        addIfPresent(parameterSet, LinearCombinationConstants.POW_3, functions, weights, p -> x -> {
            double alpha = p.get(LinearCombinationConstants.ALPHA);
            double a = p.get(LinearCombinationConstants.A);
            return a * alpha * Math.pow(x, -alpha - 1);
        });
        addIfPresent(parameterSet, LinearCombinationConstants.LOG_LOG_LINEAR, functions, weights, p -> x -> {
            double a = p.get(LinearCombinationConstants.A);
            double b = p.get(LinearCombinationConstants.B);
            return a / (a * x * Math.log(x) + b * x);
        });
        addIfPresent(parameterSet, LinearCombinationConstants.HILL_3, functions, weights, p -> x -> {
            double y = p.get(LinearCombinationConstants.Y);
            double eta = p.get(LinearCombinationConstants.ETA);
            double kappa = p.get(LinearCombinationConstants.KAPPA);
            return (y * eta * Math.pow(kappa, eta) * Math.pow(x, eta - 1)) / (Math.pow(Math.pow(kappa, eta) + Math.pow(x, eta), 2));
        });
        addIfPresent(parameterSet, LinearCombinationConstants.LOG_POWER, functions, weights, p -> x -> {
            double a = p.get(LinearCombinationConstants.A);
            double b = p.get(LinearCombinationConstants.B);
            double c = p.get(LinearCombinationConstants.C);
            return -1 * (a * c * Math.pow(Math.exp(-b) * x, c)) / (x * Math.pow(Math.pow(Math.exp(-b) * x, c) + 1, 2));
        });
        addIfPresent(parameterSet, LinearCombinationConstants.POW_4, functions, weights, p -> x -> {
            double a = p.get(LinearCombinationConstants.A);
            double b = p.get(LinearCombinationConstants.B);
            double alpha = p.get(LinearCombinationConstants.ALPHA);
            return a * alpha * Math.pow(a * x + b, -alpha - 1);
        });
        addIfPresent(parameterSet, LinearCombinationConstants.MMF, functions, weights, p -> x -> {
            double alpha = p.get(LinearCombinationConstants.ALPHA);
            double beta = p.get(LinearCombinationConstants.BETA);
            double delta = p.get(LinearCombinationConstants.DELTA);
            double kappa = p.get(LinearCombinationConstants.KAPPA);
            return (delta * (alpha - beta) * Math.pow(kappa * x, delta)) / (x * Math.pow(1 + Math.pow(kappa * x, delta), 2));
        });
        addIfPresent(parameterSet, LinearCombinationConstants.EXP_4, functions, weights, p -> x -> {
            double a = p.get(LinearCombinationConstants.A);
            double b = p.get(LinearCombinationConstants.B);
            double alpha = p.get(LinearCombinationConstants.ALPHA);
            return a * alpha * Math.pow(x, alpha - 1) * Math.exp(b - a * Math.pow(x, alpha));
        });
        addIfPresent(parameterSet, LinearCombinationConstants.JANOSCHEK, functions, weights, p -> x -> {
            double alpha = p.get(LinearCombinationConstants.ALPHA);
            double beta = p.get(LinearCombinationConstants.BETA);
            double delta = p.get(LinearCombinationConstants.DELTA);
            double kappa = p.get(LinearCombinationConstants.KAPPA);
            return kappa * delta * (alpha - beta) * Math.pow(x, delta - 1) * Math.exp(-kappa * Math.pow(x, delta));
        });
        addIfPresent(parameterSet, LinearCombinationConstants.WEIBULL, functions, weights, p -> x -> {
            double alpha = p.get(LinearCombinationConstants.ALPHA);
            double beta = p.get(LinearCombinationConstants.BETA);
            double delta = p.get(LinearCombinationConstants.DELTA);
            double kappa = p.get(LinearCombinationConstants.KAPPA);
            return (delta * (alpha - beta) * Math.exp(-1 * Math.pow(kappa * x, delta)) * Math.pow(kappa * x, delta)) / x;
        });
        addIfPresent(parameterSet, LinearCombinationConstants.ILOG_2, functions, weights, p -> x -> {
            double a = p.get(LinearCombinationConstants.A);
            return a / (x * Math.pow(Math.log(x), 2));
        });
        return new LinearCombinationFunction(functions, weights);
    }

    @Override
    public double getCurveValue(final double x) {
        return this.learningCurve.value(x);
    }

    /**
     * Point at which the derivative drops below {@link #SLOPE_SATURATION_POINT}.
     * NOTE: mutates the shared derivative's offset; this class is not thread-safe.
     */
    @Override
    public double getSaturationPoint(final double epsilon) {
        return this.computeDerivativeRoot(epsilon, -1 * SLOPE_SATURATION_POINT, this.dataSetSize);
    }

    @Override
    public double getDerivativeCurveValue(final double x) {
        this.derivative.setOffset(0); // reset any offset left behind by a root search
        return this.derivative.value(x);
    }

    /**
     * Curve value at the point where the slope falls below {@link #SLOPE_CONVERGENCE_VALUE};
     * the root is searched far beyond the data set size (factor 100).
     */
    @Override
    public double getConvergenceValue() {
        int x = (int) this.computeDerivativeRoot(TOLERANCE_CONVERGENCE_VALUE, -1 * SLOPE_CONVERGENCE_VALUE, this.dataSetSize * 100);
        return this.getCurveValue(x);
    }

    /**
     * Finds an x where derivative(x) + offset = 0 via Brent's method, repeatedly
     * doubling the search interval on bracketing failures. -1 is used as a
     * "no result" sentinel, which is safe since the search starts at x = 1.
     *
     * @throws IllegalArgumentException if no root is found after all retries
     */
    private double computeDerivativeRoot(final double epsilon, final double offset, final int upperIntervalBoundStart) {
        UnivariateSolver solver = new BrentSolver(0, epsilon);
        double result = -1;
        int lowerIntervalBound = 1;
        int upperIntervalBound = upperIntervalBoundStart;
        int retriesLeft = ROOT_COMPUTATION_RETRIES;
        this.derivative.setOffset(offset);
        while (retriesLeft > 0 && result == -1) {
            try {
                LOG.info("Trying to find root with offset {} in interval [{}/{}]", offset, lowerIntervalBound, upperIntervalBound);
                result = solver.solve(1000, this.derivative, lowerIntervalBound, upperIntervalBound);
            } catch (NoBracketingException e) {
                LOG.warn("Cannot find root in interval [{},{}]: {}", lowerIntervalBound, upperIntervalBound, e.getMessage());
                retriesLeft--;
                LOG.warn("Retries left: {} / {}", retriesLeft, ROOT_COMPUTATION_RETRIES);
                upperIntervalBound *= 2;
                lowerIntervalBound *= 2;
            }
        }
        // Try a higher lower bound (sometimes functions behave unexpectedly close to 0)
        if (result == -1) {
            try {
                // Fixed: log the bounds that are actually searched (50, not lowerIntervalBound)
                LOG.info("Trying to find root with offset {} in interval [{}/{}]", offset, 50, upperIntervalBound);
                result = solver.solve(1000, this.derivative, 50, upperIntervalBound);
            } catch (NoBracketingException e) {
                LOG.warn("Cannot find root in interval [{},{}]: {}", 50, upperIntervalBound, e.getMessage());
            }
        }
        if (result == -1) {
            throw new IllegalArgumentException(String.format("No solution could be found in interval [1,%d]", upperIntervalBound));
        }
        return result;
    }
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation/lc/LinearCombinationLearningCurveConfiguration.java
package ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation.lc;

import java.util.List;
import java.util.Objects;

/**
 * A configuration for a linear combination learning curve consists of
 * parameterizations for at least one linear combination function. If multiple
 * parameterizations are given, the value of the learning curve can be
 * calculated by averaging the values of the individual linear combination
 * functions.
 *
 * @author Felix Weiland
 */
public class LinearCombinationLearningCurveConfiguration {

    /** Parameterizations of the individual linear combination functions. */
    private List<LinearCombinationParameterSet> parameterSets;

    public List<LinearCombinationParameterSet> getParameterSets() {
        return this.parameterSets;
    }

    public void setParameterSets(final List<LinearCombinationParameterSet> parameterSets) {
        this.parameterSets = parameterSets;
    }

    @Override
    public int hashCode() {
        // Objects.hash produces the same value as the former manual 31-based accumulation.
        return Objects.hash(this.parameterSets);
    }

    @Override
    public boolean equals(final Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || this.getClass() != obj.getClass()) {
            return false;
        }
        LinearCombinationLearningCurveConfiguration other = (LinearCombinationLearningCurveConfiguration) obj;
        return Objects.equals(this.parameterSets, other.parameterSets);
    }

    @Override
    public String toString() {
        return "LinearCombinationLearningCurveConfiguration [parameterSets=" + this.parameterSets + "]";
    }
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation/lc/LinearCombinationParameterSet.java
package ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation.lc;

import java.util.Map;
import java.util.Objects;

/**
 * This class encapsulates all parameters that are required in order to create a
 * weighted linear combination of parameterized functions. Hence, a
 * LinearCombinationParameterSet object contains a weight for each function and,
 * for each function, a map which maps parameter names to its values.
 *
 * @author Felix Weiland
 */
public class LinearCombinationParameterSet {

    /** Weights of the functions, keyed by function name. */
    private Map<String, Double> weights;

    /** Individual function parameters: function name -> (parameter name -> value). */
    private Map<String, Map<String, Double>> parameters;

    public Map<String, Double> getWeights() {
        return this.weights;
    }

    public void setWeights(final Map<String, Double> weights) {
        this.weights = weights;
    }

    public Map<String, Map<String, Double>> getParameters() {
        return this.parameters;
    }

    public void setParameters(final Map<String, Map<String, Double>> parameters) {
        this.parameters = parameters;
    }

    @Override
    public int hashCode() {
        // Same order (parameters, then weights) and same value as the former manual version.
        return Objects.hash(this.parameters, this.weights);
    }

    @Override
    public boolean equals(final Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || this.getClass() != obj.getClass()) {
            return false;
        }
        LinearCombinationParameterSet other = (LinearCombinationParameterSet) obj;
        return Objects.equals(this.parameters, other.parameters) && Objects.equals(this.weights, other.weights);
    }

    @Override
    public String toString() {
        return "LinearCombinationParameterSet [weights=" + this.weights + ", parameters=" + this.parameters + "]";
    }
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation/lc/ParametricFunction.java
package ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation.lc;

import java.util.Map;

import org.apache.commons.math3.analysis.UnivariateFunction;

/**
 * Base class for univariate functions whose behavior is controlled by a set of
 * named parameters. Subclasses read the parameters via {@link #getParams()}.
 *
 * @author Felix Weiland
 */
public abstract class ParametricFunction implements UnivariateFunction {

    /** Maps parameter names to their values. */
    private Map<String, Double> params;

    public ParametricFunction() {
        // parameters may be supplied later via setParams
    }

    public ParametricFunction(final Map<String, Double> params) {
        this.params = params;
    }

    public Map<String, Double> getParams() {
        return this.params;
    }

    public void setParams(final Map<String, Double> params) {
        this.params = params;
    }
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation/lcnet/LCNetClient.java
package ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation.lcnet;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.HttpURLConnection;
import java.net.ProtocolException;
import java.net.URL;
import java.util.Arrays;
import java.util.HashMap;

import org.api4.java.ai.ml.core.exception.PredictionException;
import org.api4.java.ai.ml.core.exception.TrainingException;
import org.json.JSONArray;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import ai.libs.jaicore.logging.LoggerUtil;

/**
 * This class handles the connection to a server that runs pybnn.
 * This way we can use the LCNet from pybnn to get pointwise estimates
 * of learning curves for certain classifiers and configurations of a classifier.
 *
 * @author noni4
 */
public class LCNetClient {

    private Logger logger = LoggerFactory.getLogger(LCNetClient.class);

    // TODO Should not be hardcoded like this
    private static final String SERVER_ADDRESS = "http://localhost:5001/";

    /**
     * Trains the remote LCNet on the given anchor points.
     *
     * @param xValues training set sizes of the anchor points
     * @param yValues observed scores, parallel to {@code xValues}
     * @param dataSetSize total data set size, used to normalize {@code xValues}
     * @param configurations classifier configuration per anchor point, parallel to {@code xValues}
     * @param identifier name under which the net is addressed on the server
     * @throws TrainingException if the connection to the server cannot be established
     */
    public void train(final int[] xValues, final double[] yValues, final int dataSetSize, final double[][] configurations, final String identifier) throws TrainingException {
        if (xValues.length != yValues.length) {
            throw new IllegalArgumentException("xValues must contain the same number of values as yValues");
        }
        if (xValues.length != configurations.length) {
            // fixed garbled message text
            throw new IllegalArgumentException("xValues must contain as many values as there are configurations");
        }
        HttpURLConnection httpCon;
        try {
            httpCon = this.establishHttpCon("train", identifier);
        } catch (IOException e1) {
            throw new TrainingException("Could not train", e1);
        }
        JSONObject jsonData = new JSONObject();
        for (int i = 0; i < xValues.length; i++) {
            // Payload row i: configuration values, relative training set size, observed score.
            double[] tmpArray = Arrays.copyOf(configurations[i], configurations[i].length + 2);
            tmpArray[configurations[i].length] = (double) xValues[i] / dataSetSize;
            tmpArray[configurations[i].length + 1] = yValues[i];
            jsonData.put(Integer.toString(i), new JSONArray(tmpArray));
        }
        try {
            // Close the writer before opening the input stream so the body is flushed completely.
            try (OutputStreamWriter out = new OutputStreamWriter(httpCon.getOutputStream())) {
                out.write(jsonData.toString());
            }
            httpCon.getInputStream(); // triggers the request; the response body is not needed
        } catch (IOException e) {
            // Best effort: failures while sending the payload are logged, not rethrown.
            this.logger.error(LoggerUtil.getExceptionInfo(e));
        }
    }

    /**
     * Queries the remote LCNet for a pointwise prediction.
     *
     * @param xValue (relative) position on the learning curve to predict for
     * @param configurations classifier configuration values
     * @param identifier name under which the net is addressed on the server
     * @return the predicted curve value
     * @throws PredictionException if the communication with the server fails
     */
    @SuppressWarnings("unchecked")
    public double predict(final int xValue, final double[] configurations, final String identifier) throws PredictionException {
        try {
            HttpURLConnection httpCon = this.establishHttpCon("predict", identifier);
            JSONObject jsonData = new JSONObject();
            double[] tmpArray = Arrays.copyOf(configurations, configurations.length + 1);
            tmpArray[configurations.length] = xValue;
            jsonData.put("0", new JSONArray(tmpArray));
            try (OutputStreamWriter out = new OutputStreamWriter(httpCon.getOutputStream())) {
                out.write(jsonData.toString());
            }
            StringBuilder inputBuilder = new StringBuilder();
            // try-with-resources: the reader was previously leaked
            try (BufferedReader in = new BufferedReader(new InputStreamReader(httpCon.getInputStream()))) {
                String inputLine;
                while ((inputLine = in.readLine()) != null) {
                    inputBuilder.append(inputLine);
                }
            }
            HashMap<String, Double> entireInput = new ObjectMapper().readValue(inputBuilder.toString(), HashMap.class);
            return entireInput.get("prediction").doubleValue();
        } catch (IOException e1) {
            throw new PredictionException("Could not predict", e1);
        }
    }

    /**
     * Deletes the net stored under the given identifier on the server.
     *
     * @throws IOException if the communication with the server fails
     */
    public void deleteNet(final String identifier) throws IOException {
        HttpURLConnection httpCon = this.establishHttpCon("delete", identifier);
        try (OutputStreamWriter out = new OutputStreamWriter(httpCon.getOutputStream())) {
            httpCon.getInputStream();
        }
    }

    /**
     * Opens a PUT connection to {@code SERVER_ADDRESS/<urlParameter>/<identifier>}.
     */
    private HttpURLConnection establishHttpCon(final String urlParameter, final String identifier) throws IOException {
        URL url = new URL(SERVER_ADDRESS + urlParameter + "/" + identifier);
        HttpURLConnection httpCon = (HttpURLConnection) url.openConnection();
        httpCon.setDoOutput(true);
        try {
            httpCon.setRequestMethod("PUT");
        } catch (ProtocolException e) {
            this.logger.error(LoggerUtil.getExceptionInfo(e));
        }
        return httpCon;
    }
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation/lcnet/LCNetExtrapolationMethod.java
package ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation.lcnet;

import java.io.IOException;

import org.api4.java.ai.ml.core.evaluation.learningcurve.ILearningCurve;
import org.api4.java.ai.ml.core.exception.TrainingException;

import ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation.InvalidAnchorPointsException;
import ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation.LearningCurveExtrapolationMethod;

/**
 * Learning curve extrapolation backed by the LCNet from pybnn, reached through
 * the {@link LCNetClient}. Callers must invoke {@link #setConfigurations(double[])}
 * before extrapolating, since the LCNet requires the classifier configuration.
 *
 * @author noni4
 */
public class LCNetExtrapolationMethod implements LearningCurveExtrapolationMethod {

    /** Client used for all communication with the pybnn server. */
    private LCNetClient lcNet = new LCNetClient();

    /** Name under which the net is addressed on the server. */
    private String identifier;

    /** Classifier configuration; must be set before extrapolation. */
    private double[] configurations;

    public LCNetExtrapolationMethod(final String identifier) {
        this.identifier = identifier;
        this.configurations = null;
    }

    public void setConfigurations(final double[] configurations) {
        this.configurations = configurations;
    }

    @Override
    public ILearningCurve extrapolateLearningCurveFromAnchorPoints(final int[] xValues, final double[] yValues, final int dataSetSize) throws InvalidAnchorPointsException {
        // Guard clause: the configuration is mandatory for the LCNet.
        if (this.configurations == null) {
            throw new UnsupportedOperationException("Configurations is not allowed to be null");
        }
        // NOTE(review): xValues/yValues are not used here; the returned curve queries
        // the (previously trained) net point-wise instead.
        return new PointWiseLearningCurve(dataSetSize, this.configurations, this.identifier);
    }

    // It is not clarified yet if this method should be called from this class
    public void deleteNet() throws IOException {
        this.lcNet.deleteNet(this.identifier);
    }

    // It is not clarified yet if this method should be called from this class
    public void trainNet(final int[] xValues, final double[] yValues, final int dataSetSize, final double[][] configurations) throws TrainingException {
        this.lcNet.train(xValues, yValues, dataSetSize, configurations, this.identifier);
    }
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/functionprediction/learner/learningcurveextrapolation/lcnet/PointWiseLearningCurve.java
package ai.libs.jaicore.ml.functionprediction.learner.learningcurveextrapolation.lcnet;

import org.api4.java.ai.ml.core.evaluation.learningcurve.ILearningCurve;
import org.api4.java.ai.ml.core.exception.PredictionException;

/**
 * This class represents a learning curve that gets returned by the
 * LCNet from pybnn. Each curve query is delegated to the remote net
 * via a {@link LCNetClient}; nothing is cached locally.
 *
 * @author noni4
 */
public class PointWiseLearningCurve implements ILearningCurve {

	// Total size of the dataset the curve refers to.
	private int dataSetSize;
	// Hyperparameter configuration of the classifier, forwarded to the LCNet on each query.
	private double[] configurations;
	// Client used to query the remote LCNet for point-wise predictions.
	private LCNetClient lcNetClient = new LCNetClient();
	// Identifier of the net on the pybnn side.
	private String identifier;

	/**
	 * @param dataSetSize Total number of instances of the dataset.
	 * @param configurations Classifier configuration vector required by the LCNet.
	 * @param identifier Name under which the net is addressed on the pybnn side.
	 */
	public PointWiseLearningCurve(final int dataSetSize, final double[] configurations, final String identifier) {
		this.dataSetSize = dataSetSize;
		this.configurations = configurations;
		this.identifier = identifier;
	}

	/**
	 * Queries the LCNet for the curve value at position {@code x}.
	 *
	 * @throws UnsupportedOperationException wrapping any {@link PredictionException} from the client.
	 */
	@Override
	public double getCurveValue(final double x) {
		try {
			// NOTE(review): (int) (x / dataSetSize) truncates to 0 for any x < dataSetSize,
			// so the net is almost always queried at point 0. If x is meant to be a fraction
			// of the dataset, (int) (x * dataSetSize) was presumably intended — confirm
			// against LCNetClient.predict's contract before changing.
			return this.lcNetClient.predict((int) (x / this.dataSetSize), this.configurations, this.identifier);
		} catch (PredictionException e) {
			throw new UnsupportedOperationException(e);
		}
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/hpo
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/hpo/ggp/CFGConverter.java
package ai.libs.jaicore.ml.hpo.ggp;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import ai.libs.jaicore.components.api.IComponent;
import ai.libs.jaicore.components.api.IComponentInstance;
import ai.libs.jaicore.components.api.IParameter;
import ai.libs.jaicore.components.api.IRequiredInterfaceDefinition;
import ai.libs.jaicore.components.exceptions.ComponentNotFoundException;
import ai.libs.jaicore.components.model.CategoricalParameterDomain;
import ai.libs.jaicore.components.model.ComponentInstance;
import ai.libs.jaicore.components.model.ComponentUtil;
import ai.libs.jaicore.components.model.NumericParameterDomain;

/**
 * Converts a component repository (plus a requested interface) into a context-free
 * grammar string for grammar-based genetic programming, and parses a derivation
 * string produced from that grammar back into a {@link ComponentInstance}.
 *
 * Grammar conventions used here:
 * - non-terminals are written as {@code <name>},
 * - alternatives are separated by {@code " | "},
 * - numeric parameters map to {@code RANDINT_TYPE0(min,max)} / {@code RANDFLOAT(min,max)} terminals,
 * - spaces inside categorical values are encoded as {@code #_#} (undone again when parsing).
 */
public class CFGConverter {

	// Placeholder for spaces inside categorical values; the derivation string is later split on " ".
	private static final String SPACE_REPLACEMENT = "#_#";
	private static final String OR_OP = " | ";
	private static final String PRODUCTION_OP = " ::= ";
	private static final String NON_TERMINAL_PATTERN = "<%s>";
	private static final String START_SYMBOL = String.format(NON_TERMINAL_PATTERN, "START");

	private final Collection<? extends IComponent> components;
	private final String requestedInterface;

	/**
	 * @param components The full component repository to draw productions from.
	 * @param requestedInterface The interface the START symbol must resolve to.
	 */
	public CFGConverter(final Collection<? extends IComponent> components, final String requestedInterface) {
		this.components = components;
		this.requestedInterface = requestedInterface;
	}

	/**
	 * Builds the full grammar: one START production listing all components providing
	 * the requested interface, followed by the (recursively collected) productions
	 * of those components.
	 *
	 * @return The grammar as a newline-separated string of productions.
	 */
	public String toGrammar() {
		StringBuilder sb = new StringBuilder();
		Collection<? extends IComponent> matchingComponents = ComponentUtil.getComponentsProvidingInterface(this.components, this.requestedInterface);
		Map<String, String> productions = new HashMap<>();
		sb.append(START_SYMBOL).append(PRODUCTION_OP).append(this.componentsToOrListOfNonTerminals(matchingComponents)).append("\n");
		for (IComponent component : matchingComponents) {
			this.addComponentProductions(this.components, component, productions);
		}
		productions.values().stream().forEach(sb::append);
		return sb.toString();
	}

	// Renders a list of components as "<CompA> | <CompB> | ...".
	private String componentsToOrListOfNonTerminals(final Collection<? extends IComponent> components) {
		return components.stream().map(x -> String.format(NON_TERMINAL_PATTERN, x.getName())).collect(Collectors.joining(OR_OP));
	}

	/**
	 * Adds the production for {@code component} (and, recursively, for every component
	 * that can satisfy one of its required interfaces) to {@code productions}.
	 * The map also acts as the visited-set: an existing key means the component was handled.
	 */
	private void addComponentProductions(final Collection<? extends IComponent> components, final IComponent component, final Map<String, String> productions) {
		StringBuilder compProduction = new StringBuilder();
		String compNT = String.format(NON_TERMINAL_PATTERN, component.getName());
		if (productions.containsKey(compNT)) {
			return; // already expanded (also breaks recursion cycles)
		}
		compProduction.append(compNT).append(PRODUCTION_OP).append(component.getName());
		// One "<ns> <ns-NT>" pair per parameter; the parameter's domain defines the NT's production.
		for (IParameter param : component.getParameters()) {
			String nsParam = component.getName() + "." + param.getName();
			String paramNT = String.format(NON_TERMINAL_PATTERN, nsParam);
			compProduction.append(" ").append(nsParam).append(" ").append(paramNT);
			if (param.getDefaultDomain() instanceof NumericParameterDomain) {
				NumericParameterDomain dom = (NumericParameterDomain) param.getDefaultDomain();
				if (dom.isInteger()) {
					productions.put(paramNT, paramNT + PRODUCTION_OP + "RANDINT_TYPE0(" + (int) dom.getMin() + "," + (int) dom.getMax() + ")\n");
				} else {
					productions.put(paramNT, paramNT + PRODUCTION_OP + "RANDFLOAT(" + dom.getMin() + "," + dom.getMax() + ")\n");
				}
			} else if (param.getDefaultDomain() instanceof CategoricalParameterDomain) {
				CategoricalParameterDomain dom = (CategoricalParameterDomain) param.getDefaultDomain();
				// Values containing spaces are masked so the derivation string stays space-splittable.
				productions.put(paramNT, paramNT + PRODUCTION_OP + Arrays.stream(dom.getValues()).map(x -> x.contains(" ") ? x.replace(" ", SPACE_REPLACEMENT) : x).collect(Collectors.joining(OR_OP)) + "\n");
			}
		}
		for (IRequiredInterfaceDefinition requiredInterface : component.getRequiredInterfaces()) {
			// NOTE(review): the token uses the interface *id* while the non-terminal uses the
			// interface *name*; distinct required interfaces of the same name would share one NT.
			// Presumably intentional (shared productions) — verify against the grammar consumer.
			String nsI = component.getName() + "." + requiredInterface.getId();
			String reqINT = String.format(NON_TERMINAL_PATTERN, requiredInterface.getName());
			compProduction.append(" ").append(nsI).append(" ").append(reqINT);
			Collection<IComponent> componentsMatching = new ArrayList<>();
			// The mapping function deliberately side-effects componentsMatching: it is only
			// populated when the production is newly created, so the recursion below only
			// descends into providers the first time this interface NT is seen.
			productions.computeIfAbsent(reqINT, t -> {
				componentsMatching.addAll(ComponentUtil.getComponentsProvidingInterface(components, requiredInterface.getName()));
				return t + PRODUCTION_OP + this.componentsToOrListOfNonTerminals(componentsMatching) + "\n";
			});
			componentsMatching.stream().forEach(c -> this.addComponentProductions(components, c, productions));
		}
		compProduction.append("\n");
		productions.put(compNT, compProduction.toString());
	}

	/**
	 * Parses a space-separated derivation string of the form
	 * {@code <componentName> (<nsKey> <value>)*} back into a component instance.
	 * Masked spaces ({@code #_#}) in values are restored.
	 *
	 * @throws ComponentNotFoundException if a referenced component is not in the repository.
	 */
	public ComponentInstance grammarStringToComponentInstance(final String grammarString) throws ComponentNotFoundException {
		String[] tokens = grammarString.split(" ");
		Map<String, String> paramValues = new HashMap<>();
		// tokens[0] is the root component name; the rest are (key, value) pairs.
		for (int i = 1; i < tokens.length; i = i + 2) {
			paramValues.put(tokens[i], tokens[i + 1].contains(SPACE_REPLACEMENT) ? tokens[i + 1].replace(SPACE_REPLACEMENT, " ") : tokens[i + 1]);
		}
		return this.buildComponentInstanceFromMap(tokens[0], paramValues);
	}

	/**
	 * Recursively reconstructs the component instance named {@code componentName},
	 * resolving its required interfaces and parameter values from the flat value map.
	 */
	private ComponentInstance buildComponentInstanceFromMap(final String componentName, final Map<String, String> values) throws ComponentNotFoundException {
		Map<String, String> parameters = new HashMap<>();
		Map<String, List<IComponentInstance>> reqIs = new HashMap<>();
		ComponentInstance root = new ComponentInstance(ComponentUtil.getComponentByName(componentName, this.components), parameters, reqIs);
		// reconstruct required interfaces (each satisfied by exactly one nested instance)
		for (IRequiredInterfaceDefinition reqI : root.getComponent().getRequiredInterfaces()) {
			root.getSatisfactionOfRequiredInterfaces().put(reqI.getId(), Arrays.asList(this.buildComponentInstanceFromMap(values.get(componentName + "." + reqI.getId()), values)));
		}
		// reconstruct param values
		for (IParameter param : root.getComponent().getParameters()) {
			root.getParameterValues().put(param.getName(), values.get(componentName + "." + param.getName()));
		}
		return root;
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/hpo
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/hpo/ggp/GrammarBasedGeneticProgramming.java
package ai.libs.jaicore.ml.hpo.ggp;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

import org.aeonbits.owner.ConfigFactory;
import org.api4.java.algorithm.events.IAlgorithmEvent;
import org.api4.java.algorithm.exceptions.AlgorithmException;
import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException;
import org.api4.java.algorithm.exceptions.AlgorithmTimeoutedException;
import org.api4.java.common.attributedobjects.IObjectEvaluator;
import org.api4.java.common.attributedobjects.ObjectEvaluationFailedException;
import org.api4.java.common.attributedobjects.ScoredItem;
import org.epochx.gr.op.crossover.WhighamCrossover;
import org.epochx.gr.op.init.GrowInitialiser;
import org.epochx.gr.op.mutation.WhighamMutation;
import org.epochx.gr.representation.GRCandidateProgram;
import org.epochx.representation.CandidateProgram;
import org.epochx.tools.grammar.Grammar;
import org.epochx.tools.random.MersenneTwisterFast;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ai.libs.jaicore.basic.IOwnerBasedAlgorithmConfig;
import ai.libs.jaicore.basic.StatisticsUtil;
import ai.libs.jaicore.basic.algorithm.AOptimizer;
import ai.libs.jaicore.components.api.IComponentInstance;
import ai.libs.jaicore.components.model.ComponentInstance;
import ai.libs.jaicore.components.model.SoftwareConfigurationProblem;
import ai.libs.jaicore.ml.hpo.ggp.GrammarBasedGeneticProgramming.GGPSolutionCandidate;

/**
 * Grammar-based genetic programming is an evolutionary algorithm capable of evolving individuals in the form of trees, where the trees are derived from a context-free grammar (CFG).
 * As in standard evolutionary algorithms (EA), a population is maintained that is evaluated by means of a fitness function and subsequently individuals are recombined with each
 * other and mutated. In contrast to standard EAs, special operators need to be applied to adhere to the rules imposed by the CFG and therefore to not invalidate individuals.
 *
 * This specific implementation allows for both limiting the execution by number of evaluations (generations*population_size) or a timeout. If the number of generations is set to an
 * infeasible value, it will be assumed that the number of generations is set to infinity and thus a timeout will be required. As soon as the timeout is hit, the best solution seen so far
 * will be returned. If the timeout is set to an infeasible value, the number of generations will be a required configuration value. When running the algorithm, it will run to the specified
 * number of generations and return the best result found until this point. If both stopping criteria are set, the algorithm will terminate with the criterion that is first evaluated to true.
 * So, either the timeout is hit or the maximum number of generations reached.
 *
 * Early Stopping:
 * If the algorithm has already converged early, i.e., before the timeout is hit or the maximum number of generations is reached, it will terminate if the algorithm has not seen any improvements
 * for a specified number of generations.
 *
 * Random Restart:
 * Sometimes the algorithm may get stuck in local optima. To overcome this issue and move away from the local optima, there is a soft reset option called random restart. If random restart is
 * configured, every x generations the algorithm could not find any improvement, the current population is wiped except for the best individuals (defined via the elitism config). Then, the
 * population is filled up with new randomly sampled individuals. Thereby, the algorithm is able to move to other areas of the search space easily (given there are better fitness values).
 *
 * @author mwever
 *
 */
public class GrammarBasedGeneticProgramming extends AOptimizer<SoftwareConfigurationProblem<Double>, GGPSolutionCandidate, Double> {

	private static final IGrammarBasedGeneticProgrammingConfig DEF_CONFIG = ConfigFactory.create(IGrammarBasedGeneticProgrammingConfig.class);
	private static final Logger LOGGER = LoggerFactory.getLogger(GrammarBasedGeneticProgramming.class);

	// Seeded RNG shared by initialization, selection, crossover and mutation.
	private final MersenneTwisterFast rng;
	// Fitness function: maps a component instance to a (to-be-minimized) score.
	private final IObjectEvaluator<IComponentInstance, Double> evaluator;
	// Translates the component repository into a CFG and derivations back into component instances.
	private final CFGConverter converter;
	// Cache of derivation-string -> fitness, to avoid re-evaluating identical individuals.
	private final Map<String, Double> cacheMap = new HashMap<>();

	private Grammar grammar;

	// Counts generations since the last improvement of the best solution (reset to 0 on improvement).
	private AtomicInteger earlyStoppingCounter = new AtomicInteger(0);
	// Candidates (with scores) of the most recently evaluated population; filled concurrently by worker threads.
	private ConcurrentLinkedQueue<GGPSolutionCandidate> ratedSolutionCandidatesInPopulation;

	/**
	 * Immutable pairing of an evaluated component instance and its fitness score.
	 */
	public class GGPSolutionCandidate implements ScoredItem<Double> {

		private IComponentInstance ci;
		private final Double score;

		private GGPSolutionCandidate(final IComponentInstance ci, final Double score) {
			this.ci = ci;
			this.score = score;
		}

		public IComponentInstance getComponentInstance() {
			return this.ci;
		}

		@Override
		public Double getScore() {
			return this.score;
		}
	}

	/**
	 * Creates a GGP optimizer with default configuration.
	 *
	 * @param input The software configuration problem to optimize.
	 * @param seed Seed for the internal RNG.
	 */
	public GrammarBasedGeneticProgramming(final SoftwareConfigurationProblem<Double> input, final long seed) {
		this(DEF_CONFIG, input, seed);
	}

	/**
	 * Creates a GGP optimizer with the given configuration.
	 *
	 * @param config Algorithm configuration (expected to be an {@link IGrammarBasedGeneticProgrammingConfig}).
	 * @param input The software configuration problem to optimize.
	 * @param seed Seed for the internal RNG.
	 */
	public GrammarBasedGeneticProgramming(final IOwnerBasedAlgorithmConfig config, final SoftwareConfigurationProblem<Double> input, final long seed) {
		super(config, input);
		this.rng = new MersenneTwisterFast(seed);
		this.evaluator = input.getCompositionEvaluator();
		this.converter = new CFGConverter(input.getComponents(), input.getRequiredInterface());
		this.ratedSolutionCandidatesInPopulation = new ConcurrentLinkedQueue<>();
	}

	/**
	 * Runs the next algorithm step. On activation it only switches state; on the ACTIVE step
	 * it runs the entire evolution in a dedicated thread, bounded by the configured timeout
	 * (if any) via a semaphore handshake, and then terminates.
	 */
	@Override
	public IAlgorithmEvent nextWithException() throws InterruptedException, AlgorithmExecutionCanceledException, AlgorithmTimeoutedException, AlgorithmException {
		switch (this.getState()) {
		case CREATED:
			LOGGER.info("Setup GrammarBasedGeneticProgramming algorithm.");
			return this.activate();
		case ACTIVE:
			LOGGER.info("Start GrammarBasedGeneticProgramming run");
			// Released by the evo thread (in its finally block) once evolution has finished or aborted.
			Semaphore finished = new Semaphore(0);
			Thread evoThread = new Thread(() -> {
				try {
					String grammarString = GrammarBasedGeneticProgramming.this.converter.toGrammar();
					LOGGER.debug("Generated the following grammar string for the provided component repository:\n{}", grammarString);
					GrammarBasedGeneticProgramming.this.grammar = new Grammar(grammarString);

					// For initializing the population.
					GrowInitialiser initPop = new GrowInitialiser(GrammarBasedGeneticProgramming.this.rng, GrammarBasedGeneticProgramming.this.grammar, GrammarBasedGeneticProgramming.this.getConfig().getPopulationSize(),
							GrammarBasedGeneticProgramming.this.getConfig().getMaxDepth(), false);

					// Set initial population list and sort the population
					List<CandidateProgram> population = new ArrayList<>(initPop.getInitialPopulation());

					int g = 0;
					// numGenerations <= 0 is treated as "unbounded"; termination then relies on timeout/early stopping.
					while (!Thread.currentThread().isInterrupted() && (GrammarBasedGeneticProgramming.this.getConfig().getNumGenerations() <= 0 || g < GrammarBasedGeneticProgramming.this.getConfig().getNumGenerations())) {
						LOGGER.debug("Evaluate population of generation {}.", (g + 1));
						GrammarBasedGeneticProgramming.this.evaluate(population, g);
						// Sort ascending by fitness (minimization): index 0 holds the best individual.
						Collections.sort(population, (o1, o2) -> Double.compare(((GRCandidateProgram) o1).getFitnessValue(), ((GRCandidateProgram) o2).getFitnessValue()));

						// print fitness statistics of current generation if enabled.
						if (GrammarBasedGeneticProgramming.this.getConfig().getPrintFitnessStats()) {
							List<Double> fitnessList = population.stream().map(x -> ((GRCandidateProgram) x).getFitnessValue()).collect(Collectors.toList());
							LOGGER.info("Generation #{} (population size: {}) - min: {} - mean: {} - max: {}", g + 1, fitnessList.size(), StatisticsUtil.min(fitnessList), StatisticsUtil.mean(fitnessList), StatisticsUtil.max(fitnessList));
						}

						// if early termination is activated and the number of generations without change exceeds the configured generations, stop the evolution
						int generationsWithoutImprovementCounter = GrammarBasedGeneticProgramming.this.earlyStoppingCounter.getAndIncrement();
						if (GrammarBasedGeneticProgramming.this.getConfig().getEarlyStopping() >= 1 && generationsWithoutImprovementCounter > GrammarBasedGeneticProgramming.this.getConfig().getEarlyStopping()) {
							LOGGER.info("Best candidate did not change for {} generations: Thus, stop early.", GrammarBasedGeneticProgramming.this.getConfig().getEarlyStopping());
							break;
						}

						List<CandidateProgram> offspring = new ArrayList<>(GrammarBasedGeneticProgramming.this.getConfig().getPopulationSize());
						// keep elite
						for (int i = 0; i < GrammarBasedGeneticProgramming.this.getConfig().getElitismSize(); i++) {
							offspring.add(population.get(i));
						}

						// check whether thread has been interrupted. If so, terminate the algorithm.
						if (Thread.interrupted()) {
							LOGGER.debug("Thread got interrupted, exit GGP.");
							throw new InterruptedException();
						}

						// if enabled, perform random restart every x generations
						if (GrammarBasedGeneticProgramming.this.getConfig().getRandomRestart() > 0 && ((generationsWithoutImprovementCounter) % GrammarBasedGeneticProgramming.this.getConfig().getRandomRestart() == 0)) {
							LOGGER.debug("It is about time to perform a random restart in generation {}. Randomly generate {} individuals for restart.", g,
									GrammarBasedGeneticProgramming.this.getConfig().getPopulationSize() - GrammarBasedGeneticProgramming.this.getConfig().getElitismSize());
							// Replace everything but the elite with freshly grown individuals.
							GrowInitialiser randRestartPop = new GrowInitialiser(GrammarBasedGeneticProgramming.this.rng, GrammarBasedGeneticProgramming.this.grammar,
									GrammarBasedGeneticProgramming.this.getConfig().getPopulationSize() - GrammarBasedGeneticProgramming.this.getConfig().getElitismSize(), GrammarBasedGeneticProgramming.this.getConfig().getMaxDepth(), false);
							offspring.addAll(randRestartPop.getInitialPopulation());
						} else {
							// fill up offspring with recombinations
							while (offspring.size() < GrammarBasedGeneticProgramming.this.getConfig().getPopulationSize()) {
								if (Thread.interrupted()) {
									throw new InterruptedException();
								}
								// Tournament-select two parents and clone them before applying operators.
								CandidateProgram child1 = GrammarBasedGeneticProgramming.this.tournament(population).clone();
								CandidateProgram child2 = GrammarBasedGeneticProgramming.this.tournament(population).clone();

								double randomX = GrammarBasedGeneticProgramming.this.rng.nextDouble();
								if (randomX < GrammarBasedGeneticProgramming.this.getConfig().getCrossoverRate()) {
									WhighamCrossover xover = new WhighamCrossover(GrammarBasedGeneticProgramming.this.rng);
									CandidateProgram[] xoverprograms = xover.crossover(child1.clone(), child2.clone());
									// crossover may fail and return null; then the unmodified clones are kept.
									if (xoverprograms != null) {
										child1 = xoverprograms[0];
										child2 = xoverprograms[1];
									}
								}
								child1 = GrammarBasedGeneticProgramming.this.mutate(child1);
								child2 = GrammarBasedGeneticProgramming.this.mutate(child2);
								offspring.add(child1);
								// Guard against overshooting the population size when it is odd.
								if (offspring.size() < GrammarBasedGeneticProgramming.this.getConfig().getPopulationSize()) {
									offspring.add(child2);
								}
							}
						}
						population = offspring;
						g++;
					}
				} catch (InterruptedException e) {
					LOGGER.debug("GGP thread got interrupted, release semaphore and shutdown.");
					Thread.currentThread().interrupt();
				} catch (Exception e) {
					// NOTE(review): printStackTrace duplicates the SLF4J error log; consider removing it.
					LOGGER.error("Unexpected exception occurred and forced GGP to terminate.", e);
					e.printStackTrace();
				} finally {
					finished.release();
				}
			});
			evoThread.start();

			try {
				if (this.getConfig().getTimeout().milliseconds() > 0) {
					// a timeout is specified, thus, finish with what occurs first: timeout or max generations reached
					if (LOGGER.isDebugEnabled()) {
						LOGGER.debug("Wait for {} ms", this.getConfig().getTimeout().milliseconds());
					}
					boolean acquired = finished.tryAcquire(this.getConfig().getTimeout().milliseconds(), TimeUnit.MILLISECONDS);
					if (!acquired) {
						LOGGER.debug("Timeout occurred for evo thread. Now shut it down.");
						evoThread.interrupt();
					}
				} else {
					// no timeout configured: wait until max generations are reached.
					finished.acquire();
				}
			} catch (InterruptedException e) {
				// NOTE(review): the interrupt is forwarded to the evo thread but the current thread's
				// interrupt flag is not restored and the exception is not rethrown — confirm this
				// best-effort shutdown semantics is intended.
				LOGGER.debug("Main GGP thread got interrupted, now interrupt evoThread.");
				evoThread.interrupt();
			}
			return this.terminate();
		default:
			throw new IllegalStateException("Illegal state for this algorithm to run anything.");
		}
	}

	// Applies Whigham mutation to a clone of the program with probability getMutationRate().
	private CandidateProgram mutate(final CandidateProgram program) {
		CandidateProgram mutated = program.clone();
		if (this.rng.nextDouble() < this.getConfig().getMutationRate()) {
			WhighamMutation mutation = new WhighamMutation(this.rng);
			mutated = mutation.mutate(mutated);
		}
		return mutated;
	}

	// Tournament selection: shuffle the population, draw the first getTournamentSize()
	// candidates, and return the best of them (natural ordering of CandidateProgram).
	private CandidateProgram tournament(final List<CandidateProgram> population) {
		List<CandidateProgram> candidates = new ArrayList<>(population);
		Collections.shuffle(candidates, new Random(this.rng.nextLong()));
		List<CandidateProgram> tournamentCandidates = IntStream.range(0, this.getConfig().getTournamentSize()).mapToObj(candidates::get).collect(Collectors.toList());
		Collections.sort(tournamentCandidates);
		return tournamentCandidates.get(0);
	}

	/**
	 * Evaluates all individuals of the given population in parallel on a fixed-size thread
	 * pool. Cached individuals are scored immediately; each finished (or cached) evaluation
	 * releases one semaphore permit, and the method blocks until population.size() permits
	 * have been released. Also updates the best-seen solution and the early-stopping counter.
	 *
	 * @param population The individuals to evaluate (fitness is set on them in-place).
	 * @param generation Index of the current generation, attached to candidates as an annotation.
	 * @throws InterruptedException if interrupted while waiting; the pool is then shut down hard.
	 */
	private void evaluate(final List<CandidateProgram> population, final int generation) throws InterruptedException {
		this.ratedSolutionCandidatesInPopulation.clear();
		ExecutorService pool = Executors.newFixedThreadPool(this.getConfig().cpus());
		AtomicBoolean interrupted = new AtomicBoolean(false);
		Semaphore semaphore = new Semaphore(0);

		try {
			for (CandidateProgram individual : population) {
				if (this.cacheMap.containsKey(individual.toString())) {
					// Cache hit: reuse the known fitness and count this individual as done.
					((GRCandidateProgram) individual).setFitnessValue(this.cacheMap.get(individual.toString()));
					semaphore.release();
				} else {
					Runnable evaluateTask = () -> {
						GRCandidateProgram realInd = ((GRCandidateProgram) individual);
						try {
							if (Thread.interrupted() || interrupted.get()) {
								throw new InterruptedException();
							}
							ComponentInstance ci = GrammarBasedGeneticProgramming.this.converter.grammarStringToComponentInstance(individual.toString());
							ci.putAnnotation("generation", generation + ""); // annotate component instance to be an individual of generation $generation$
							double fitnessValue = GrammarBasedGeneticProgramming.this.evaluator.evaluate(ci);
							GGPSolutionCandidate solutionCandidate = new GGPSolutionCandidate(ci, fitnessValue);
							this.ratedSolutionCandidatesInPopulation.add(solutionCandidate);
							// A new best solution resets the early-stopping counter.
							if (GrammarBasedGeneticProgramming.this.updateBestSeenSolution(solutionCandidate)) {
								GrammarBasedGeneticProgramming.this.earlyStoppingCounter.set(0);
							}
							realInd.setFitnessValue(fitnessValue);
						} catch (ObjectEvaluationFailedException | InterruptedException e) {
							// Failed/aborted evaluations are penalized with the configured failure score.
							realInd.setFitnessValue(GrammarBasedGeneticProgramming.this.getConfig().getFailedEvaluationScore());
							if (e instanceof InterruptedException) {
								Thread.currentThread().interrupt();
							}
						} catch (Exception e) {
							// NOTE(review): in this path no fitness value is assigned to the individual
							// at all — confirm downstream sorting copes with an unset fitness.
							LOGGER.warn("Could not evaluate individual {}", individual, e);
						} finally {
							semaphore.release();
						}
					};
					pool.submit(evaluateTask);
				}
			}

			// all tasks are submitted, so shutdown the pool once all the tasks are done.
			pool.shutdown();

			// wait for all the evaluations to finish.
			semaphore.acquire(population.size());

			// try to put the fitness score into the cache to not re-evaluate any candidate solutions
			population.stream().forEach(x -> {
				try {
					this.cacheMap.put(x.toString(), x.getFitness());
				} catch (Exception e) {
					// could not cache fitness => nullpointer exception must have occurred, so ignore the candidate and do not put it into the cache.
				}
			});
		} catch (InterruptedException e) {
			LOGGER.debug("Got interrupted while evaluating population. Shutdown task now.");
			interrupted.set(true);
			pool.shutdownNow();
			throw e;
		}
	}

	/**
	 * @return A snapshot of the rated candidates of the most recently evaluated population.
	 */
	public List<GGPSolutionCandidate> getLastRatedPopulation() {
		return new ArrayList<>(this.ratedSolutionCandidatesInPopulation);
	}

	@Override
	public IGrammarBasedGeneticProgrammingConfig getConfig() {
		return (IGrammarBasedGeneticProgrammingConfig) super.getConfig();
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/hpo
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/hpo/ggp/IGrammarBasedGeneticProgrammingConfig.java
package ai.libs.jaicore.ml.hpo.ggp;

import org.aeonbits.owner.Config.Sources;

import ai.libs.jaicore.basic.IOwnerBasedAlgorithmConfig;

/**
 * Configuration of {@link GrammarBasedGeneticProgramming}, loaded (if present) from
 * {@code conf/ggp.properties} via the OWNER library; otherwise the defaults below apply.
 */
@Sources({ "file:conf/ggp.properties" })
public interface IGrammarBasedGeneticProgrammingConfig extends IOwnerBasedAlgorithmConfig {

	/**
	 * @return The size of the population.
	 */
	@Key("ggp.population_size")
	@DefaultValue("100")
	public int getPopulationSize();

	/**
	 * @return The number of best individuals to keep for the next generation.
	 */
	@Key("ggp.elitism_size")
	@DefaultValue("5")
	public int getElitismSize();

	/**
	 * @return The number of individuals competing in one tournament of the tournament selection.
	 */
	@Key("ggp.tournament_size")
	@DefaultValue("2")
	public int getTournamentSize();

	/**
	 * The maximum number of generations to conduct. A value &lt;= 0 refers to an infinite number of generations and requires a timeout to be set instead.
	 *
	 * @return The maximum number of generations to conduct.
	 */
	@Key("ggp.generations")
	@DefaultValue("100")
	public int getNumGenerations();

	/**
	 * @return Maximum depth of a single tree during initialization.
	 */
	@Key("ggp.max_depth")
	@DefaultValue("50")
	public int getMaxDepth();

	/**
	 * @return The rate at which a cross over is performed.
	 */
	@Key("ggp.xover.rate")
	@DefaultValue("0.9")
	public double getCrossoverRate();

	/**
	 * @return The rate at which an individual is mutated.
	 */
	@Key("ggp.mutation.rate")
	@DefaultValue("0.1")
	public double getMutationRate();

	/**
	 * @return Whether per-generation fitness statistics (min/mean/max) are logged.
	 */
	@Key("ggp.log.fitness_stats")
	@DefaultValue("true")
	public boolean getPrintFitnessStats();

	/**
	 * Early stopping terminates the evolutionary process early if there were no changes for a certain amount of time.
	 * If configured with a value x &gt; 0, GGP will check whether the best solution was updated within the last x generations.
	 * As soon as the number of generations the best solution did not change exceeds x it will terminate the evolutionary run.
	 *
	 * @return The number of generations to wait for the best solution to change.
	 */
	@Key("ggp.early_stopping")
	@DefaultValue("20")
	public int getEarlyStopping();

	/**
	 * In order to increase diversity, the population (except for elite individuals) is substituted by randomly generated individuals to perform a random restart (seeded with elite individuals only).
	 * If this option is set to &lt;= 0, this feature is deactivated.
	 *
	 * @return The number of generations after which to perform a random restart.
	 */
	@Key("ggp.random_restart")
	@DefaultValue("10")
	public int getRandomRestart();

	/**
	 * If the evaluation of an individual fails, we will need to nevertheless assign it a score. Ideally, this score is worse than
	 * any scores that can be obtained by successfully evaluating individuals.
	 *
	 * @return The score that is assigned to individuals that failed to be evaluated.
	 */
	@Key("ggp.failed_eval_score")
	@DefaultValue("10000")
	public double getFailedEvaluationScore();
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/hpo
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/hpo/multifidelity/MultiFidelitySoftwareConfigurationProblem.java
package ai.libs.jaicore.ml.hpo.multifidelity;

import java.io.File;
import java.io.IOException;
import java.util.Collection;

import ai.libs.jaicore.components.api.IComponent;
import ai.libs.jaicore.components.api.IComponentInstance;
import ai.libs.jaicore.components.model.SoftwareConfigurationProblem;
import ai.libs.jaicore.ml.core.evaluation.evaluator.IMultiFidelityObjectEvaluator;

/**
 * A multi-fidelity software configuration problem is a software configuration problem
 * whose composition evaluator supports multi-fidelity, i.e. evaluating a candidate with
 * a specified amount of resources. It adds no state of its own; it only narrows the
 * evaluator type accepted by and returned from {@link SoftwareConfigurationProblem}.
 *
 * @author mwever
 *
 * @param <V> A comparable value for assessing the quality of candidates, usually a Double.
 */
public class MultiFidelitySoftwareConfigurationProblem<V extends Comparable<V>> extends SoftwareConfigurationProblem<V> {

	/**
	 * Reads the component repository from a configuration file.
	 *
	 * @param configurationFile File describing the component repository.
	 * @param requestedInterface The interface a solution must provide.
	 * @param compositionEvaluator Multi-fidelity evaluator for candidate compositions.
	 * @throws IOException if the configuration file cannot be read.
	 */
	public MultiFidelitySoftwareConfigurationProblem(final File configurationFile, final String requestedInterface, final IMultiFidelityObjectEvaluator<IComponentInstance, V> compositionEvaluator) throws IOException {
		super(configurationFile, requestedInterface, compositionEvaluator);
	}

	/**
	 * Uses an already materialized component repository.
	 *
	 * @param components The component repository.
	 * @param requestedInterface The interface a solution must provide.
	 * @param compositionEvaluator Multi-fidelity evaluator for candidate compositions.
	 */
	public MultiFidelitySoftwareConfigurationProblem(final Collection<IComponent> components, final String requestedInterface, final IMultiFidelityObjectEvaluator<IComponentInstance, V> compositionEvaluator) {
		super(components, requestedInterface, compositionEvaluator);
	}

	/**
	 * Copy constructor.
	 *
	 * @param problem The problem to copy.
	 */
	public MultiFidelitySoftwareConfigurationProblem(final MultiFidelitySoftwareConfigurationProblem<V> problem) {
		super(problem);
	}

	/**
	 * Covariant override: the evaluator handed in through every constructor of this class
	 * is multi-fidelity, so the cast merely restores the more specific static type.
	 */
	@Override
	public IMultiFidelityObjectEvaluator<IComponentInstance, V> getCompositionEvaluator() {
		return (IMultiFidelityObjectEvaluator<IComponentInstance, V>) super.getCompositionEvaluator();
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/hpo/multifidelity
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/hpo/multifidelity/hyperband/Hyperband.java
package ai.libs.jaicore.ml.hpo.multifidelity.hyperband;

import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.stream.IntStream;

import org.api4.java.algorithm.events.IAlgorithmEvent;
import org.api4.java.algorithm.exceptions.AlgorithmException;
import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException;
import org.api4.java.algorithm.exceptions.AlgorithmTimeoutedException;
import org.api4.java.common.attributedobjects.ObjectEvaluationFailedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ai.libs.jaicore.basic.MathExt;
import ai.libs.jaicore.basic.algorithm.AOptimizer;
import ai.libs.jaicore.components.api.IEvaluatedSoftwareConfigurationSolution;
import ai.libs.jaicore.components.model.ComponentInstance;
import ai.libs.jaicore.components.model.ComponentInstanceUtil;
import ai.libs.jaicore.ml.hpo.multifidelity.MultiFidelitySoftwareConfigurationProblem;
import ai.libs.jaicore.ml.hpo.multifidelity.hyperband.Hyperband.HyperbandSolutionCandidate;
import ai.libs.jaicore.ml.hpo.multifidelity.hyperband.Hyperband.MultiFidelityScore;

/**
 * HyperBand is a simple but effective hyper-parameter optimization technique, heavily relying on a technique called successive halving.
 * Given a maximum amount of allocatable resources r_max and an integer parameter eta &gt; 1, it allocates resources in a clever way, racing
 * randomly sampled solution candidates with increasing resources for more promising ones.
 *
 * For more details, refer to the published paper by Li et al. from 2018:
 * Hyperband: A Novel Bandit-Based Approach to Hyperparameter Optimization.
 * In: Journal of Machine Learning research 18 (2018) 1-52
 *
 * @author mwever
 *
 */
public class Hyperband extends AOptimizer<MultiFidelitySoftwareConfigurationProblem<Double>, HyperbandSolutionCandidate, MultiFidelityScore> {

	private static final Logger LOGGER = LoggerFactory.getLogger(Hyperband.class);

	/**
	 * A score consisting of the budget {@code r} the candidate was evaluated with and the loss {@code score}
	 * it achieved. Scores evaluated with a larger budget are preferred over scores evaluated with a smaller
	 * budget; among equal budgets, smaller losses are preferred.
	 */
	public class MultiFidelityScore implements Comparable<MultiFidelityScore> {

		private final double r; // the budget this score was obtained with
		private final double score; // the loss achieved under budget r (smaller is better)

		public MultiFidelityScore(final double r, final double score) {
			this.r = r;
			this.score = score;
		}

		@Override
		public int compareTo(final MultiFidelityScore o) {
			// compare budgets: the more the better (later round)
			int compareBudget = Double.compare(o.r, this.r);
			// if budget is not equal prefer the score evaluated on larger budget
			if (compareBudget != 0) {
				return compareBudget;
			} else {
				// compare scores: the smaller the better (loss minimization)
				return Double.compare(this.score, o.score);
			}
		}

		@Override
		public int hashCode() {
			// NOTE(review): hashCode is based on the exact double bits while equals() below uses a 1E-8
			// tolerance, so two objects considered equal may hash differently — confirm whether these
			// scores are ever used as hash keys.
			final int prime = 31;
			int result = 1;
			result = prime * result + this.getEnclosingInstance().hashCode();
			long temp;
			temp = Double.doubleToLongBits(this.r);
			result = prime * result + (int) (temp ^ (temp >>> 32));
			temp = Double.doubleToLongBits(this.score);
			result = prime * result + (int) (temp ^ (temp >>> 32));
			return result;
		}

		@Override
		public boolean equals(final Object o) {
			if (!(o instanceof MultiFidelityScore)) {
				return false;
			}
			MultiFidelityScore other = (MultiFidelityScore) o;
			// tolerance-based comparison of budget and loss
			return (Math.abs(this.r - other.r) < 1E-8 && Math.abs(this.score - other.score) < 1E-8);
		}

		@Override
		public String toString() {
			return "(" + this.r + ";" + this.score + ")";
		}

		// needed for hashCode: a non-static inner class carries a reference to its enclosing Hyperband
		private Hyperband getEnclosingInstance() {
			return Hyperband.this;
		}
	}

	/**
	 * A solution candidate pairing a component instance with the {@link MultiFidelityScore} it obtained
	 * when evaluated with budget {@code r}.
	 */
	public class HyperbandSolutionCandidate implements IEvaluatedSoftwareConfigurationSolution<MultiFidelityScore> {

		private ComponentInstance ci;
		private MultiFidelityScore score;

		public HyperbandSolutionCandidate(final ComponentInstance ci, final double r, final double score) {
			this.ci = ci;
			this.score = new MultiFidelityScore(r, score);
		}

		@Override
		public MultiFidelityScore getScore() {
			return this.score;
		}

		@Override
		public ComponentInstance getComponentInstance() {
			return this.ci;
		}

		@Override
		public String toString() {
			return "c:" + this.score;
		}
	}

	private double eta; // halving rate: fraction 1/eta of candidates survives each stage
	private double rMax; // maximum budget a single evaluation may use
	private double crashedEvaluationScore; // score assigned to failed/interrupted evaluations
	// total budget B
	private double b;
	// number of brackets s_max
	private int sMax;
	private Random rand; // PRNG for candidate sampling, seeded from the config
	private ExecutorService pool = null; // only non-null if config.cpus() > 1

	public Hyperband(final IHyperbandConfig config, final MultiFidelitySoftwareConfigurationProblem<Double> problem) {
		super(config, problem);
		this.rand = new Random(config.getSeed());
	}

	/**
	 * Runs one step of the algorithm's state machine: on the first call (CREATED) it reads the
	 * configuration and activates; on the second call (ACTIVE) it executes all HyperBand brackets
	 * and terminates. Subsequent calls throw.
	 */
	@Override
	public IAlgorithmEvent nextWithException() throws InterruptedException, AlgorithmExecutionCanceledException, AlgorithmTimeoutedException, AlgorithmException {
		switch (this.getState()) {
		case CREATED:
			this.eta = this.getConfig().getEta();
			this.rMax = this.getInput().getCompositionEvaluator().getMaxBudget();
			this.crashedEvaluationScore = this.getConfig().getCrashScore();
			if (this.getConfig().getIterations().equals("auto")) {
				// s_max = floor(log_eta(r_max)) as proposed in the HyperBand paper
				this.sMax = (int) Math.floor(MathExt.logBase(this.rMax, this.eta));
			} else {
				this.sMax = Integer.parseInt(this.getConfig().getIterations());
			}
			this.b = (this.sMax + 1) * this.rMax;
			if (this.getConfig().cpus() > 1) {
				this.pool = Executors.newFixedThreadPool(this.getConfig().cpus());
			}
			LOGGER.info("Initialized HyperBand with eta={}, r_max={}, s_max={}, b={} and parallelizing with {} cpu cores.", this.eta, this.rMax, this.sMax, this.b, this.getConfig().cpus());
			return super.activate();
		case INACTIVE:
			throw new AlgorithmException("Algorithm has already finished.");
		default:
		case ACTIVE:
			// outer loop over brackets: s = s_max (many candidates, small budget) down to 0 (few candidates, full budget)
			for (int s = this.sMax; s >= 0; s--) {
				int n = (int) Math.ceil((this.b / this.rMax) * (Math.pow(this.eta, s) / (s + 1)));
				double r = (this.rMax) * Math.pow(this.eta, -s);
				LOGGER.info("Execute round {} of HyperBand with n={}, r={}", (this.sMax - s + 1), n, r);
				// sample random configurations
				List<ComponentInstance> t = this.getNCandidates(n);
				// begin successive halving with (n,r) inner loop
				for (int i = 0; i <= s; i++) {
					int nI = (int) Math.floor(n / Math.pow(this.eta, i));
					double rI = (r * Math.pow(this.eta, i));
					// evaluated candidates
					List<HyperbandSolutionCandidate> evaluatedCandidates = this.evaluate(t, rI);
					// sort, update best seen solution
					evaluatedCandidates.sort((o1, o2) -> o1.getScore().compareTo(o2.getScore()));
					this.updateBestSeenSolution(evaluatedCandidates.get(0));
					// select top k
					t.clear();
					int k = (int) Math.floor(nI / this.eta);
					IntStream.range(0, k).mapToObj(x -> evaluatedCandidates.get(x).getComponentInstance()).forEach(t::add);
				}
			}
			if (this.pool != null) {
				this.pool.shutdownNow();
			}
			return super.terminate();
		}
	}

	/**
	 * Evaluates all given candidates with the given budget, either sequentially or — if a thread
	 * pool was configured — in parallel. Failed or interrupted evaluations receive the configured
	 * crash score instead of propagating the failure.
	 *
	 * @param t The candidates to evaluate.
	 * @param budget The budget each evaluation may use.
	 * @return The evaluated candidates (in completion order when run in parallel).
	 * @throws InterruptedException If the calling thread is interrupted while waiting for the pool.
	 */
	private List<HyperbandSolutionCandidate> evaluate(final List<ComponentInstance> t, final double budget) throws InterruptedException {
		Lock lock = new ReentrantLock(); // guards candidateList against concurrent adds
		List<HyperbandSolutionCandidate> candidateList = new ArrayList<>(t.size());
		Semaphore sem = new Semaphore(0); // one permit released per finished evaluation
		List<Runnable> runnables = new ArrayList<>(t.size());
		for (ComponentInstance ci : t) {
			runnables.add(() -> {
				double score;
				try {
					score = Hyperband.this.getInput().getCompositionEvaluator().evaluate(ci, budget);
				} catch (InterruptedException e) {
					// restore the interrupt flag and record the crash score instead of aborting
					Thread.currentThread().interrupt();
					score = Hyperband.this.crashedEvaluationScore;
				} catch (ObjectEvaluationFailedException e) {
					score = Hyperband.this.crashedEvaluationScore;
				}
				lock.lock();
				try {
					candidateList.add(new HyperbandSolutionCandidate(ci, budget, score));
				} finally {
					lock.unlock();
					sem.release();
				}
			});
		}
		if (this.pool != null) {
			runnables.stream().forEach(this.pool::submit);
			// block until all t.size() evaluations have released their permit
			sem.acquire(t.size());
		} else {
			// sequential execution; no synchronization needed, but the same runnables are reused
			runnables.stream().forEach(Runnable::run);
		}
		return candidateList;
	}

	/**
	 * Samples n random component instances for the requested interface of the input problem.
	 *
	 * @param n The number of candidates to sample.
	 * @return A mutable list of n randomly sampled component instances.
	 */
	private List<ComponentInstance> getNCandidates(final int n) {
		List<ComponentInstance> ciList = new ArrayList<>(n);
		for (int i = 0; i < n; i++) {
			ciList.add(ComponentInstanceUtil.sampleRandomComponentInstance(this.getInput().getRequiredInterface(), this.getInput().getComponents(), this.rand));
		}
		return ciList;
	}

	@Override
	public IHyperbandConfig getConfig() {
		return (IHyperbandConfig) super.getConfig();
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/hpo/multifidelity
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/hpo/multifidelity/hyperband/IHyperbandConfig.java
package ai.libs.jaicore.ml.hpo.multifidelity.hyperband;

import ai.libs.jaicore.basic.IOwnerBasedAlgorithmConfig;

/**
 * Configuration interface for the {@link Hyperband} optimizer, exposing the
 * eta parameter, the PRNG seed, the number of iterations, and the score to
 * assign to crashed evaluations.
 */
public interface IHyperbandConfig extends IOwnerBasedAlgorithmConfig {

	/** Common namespace prefix for all HyperBand configuration keys. */
	public static final String NS = "hpo.hyperband.";
	public static final String K_ETA = NS + "eta";
	public static final String K_SEED = NS + "seed";
	public static final String K_ITERATIONS = NS + "iterations";
	public static final String K_CRASH_SCORE = NS + "crash_score";

	/**
	 * The parameter eta defines that after each round eta^-1 many solutions of the current population are preserved for the next stage of a race. The default value (according to the Hyperband paper) is 3.
	 *
	 * @return The value of the parameter eta.
	 */
	@Key(K_ETA)
	@DefaultValue("3")
	public double getEta();

	/**
	 * @return The seed for the pseudo random number generator.
	 */
	@Key(K_SEED)
	@DefaultValue("42")
	public long getSeed();

	/**
	 * The number of iterations can either be defined by 'auto' to be calculated as proposed in the paper or by defining a custom positive integer. Caution: The number of candidates in the first round is exponential in the number of iterations.
	 *
	 * @return The number of iterations.
	 */
	@Key(K_ITERATIONS)
	@DefaultValue("auto")
	public String getIterations();

	/**
	 * @return The score to be assigned to crashed evaluations.
	 */
	@Key(K_CRASH_SCORE)
	@DefaultValue(Integer.MAX_VALUE + "")
	public double getCrashScore();
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/pdm
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/pdm/dataset/SensorTimeSeries.java
package ai.libs.jaicore.ml.pdm.dataset;

import java.util.ArrayList;
import java.util.List;

import ai.libs.jaicore.basic.sets.Pair;

/**
 * A sparse time series of sensor readings, stored as a list of (timestep, value)
 * pairs. Timesteps are assumed to be added in strictly ascending order (see
 * {@link #addValue(int, double)}); not every timestep needs to carry a value.
 */
public class SensorTimeSeries {

	private List<Pair<Integer, Double>> timestepValuePairs;

	public SensorTimeSeries() {
		timestepValuePairs = new ArrayList<>();
	}

	/**
	 * Adds a timestep-value pair to the this time series. It is assumed that the given timestep is
	 * larger than any other before, so that at the end it holds: t_i < t_j for i < j.
	 *
	 * @param timestep The timestep for which a value will be added
	 * @param value The value of the timestep
	 */
	public void addValue(int timestep, double value) {
		timestepValuePairs.add(new Pair<>(timestep, value));
	}

	/**
	 * Returns the value of the given timestep if one exists, null otherwise.
	 * Performs a linear scan over all stored pairs.
	 *
	 * @param timestep The timestep to get the value for
	 * @return The value of the given timestep if one exists, otherwise null.
	 */
	public Double getValueOrNull(int timestep) {
		for (int i = 0; i < timestepValuePairs.size(); i++) {
			if (timestepValuePairs.get(i).getX().equals(timestep)) {
				return timestepValuePairs.get(i).getY();
			}
		}
		return null;
	}

	/**
	 * Returns a part of this time series starting at the given {@code fromTimestep} and ending at
	 * the given {@code toTimestep} excluding.
	 *
	 * @param fromTimestep The starting point of the window
	 * @param toTimestep The ending point of the window (exclusive)
	 * @return A window of this {@link SensorTimeSeries}
	 */
	public SensorTimeSeries getWindowedTimeSeries(int fromTimestep, int toTimestep) {
		SensorTimeSeries newSensorTimeSeries = new SensorTimeSeries();
		for (int t = 0; t < timestepValuePairs.size(); t++) {
			if (fromTimestep <= timestepValuePairs.get(t).getX() && timestepValuePairs.get(t).getX() < toTimestep) {
				newSensorTimeSeries.addValue(timestepValuePairs.get(t).getX(), timestepValuePairs.get(t).getY());
			}
		}
		return newSensorTimeSeries;
	}

	/**
	 * Returns the largest timestep contained in this time series (the timestep of the
	 * last added pair, given the ordering assumption of {@link #addValue(int, double)}),
	 * or 0 if the time series is empty.
	 *
	 * @return The maximum timestep, or 0 for an empty series.
	 */
	public int getLength() {
		// fix: previously this threw an IndexOutOfBoundsException for an empty series
		if (timestepValuePairs.isEmpty()) {
			return 0;
		}
		return timestepValuePairs.get(timestepValuePairs.size() - 1).getX();
	}

	@Override
	public int hashCode() {
		final int prime = 31;
		int result = 1;
		result = prime * result + ((timestepValuePairs == null) ? 0 : timestepValuePairs.hashCode());
		return result;
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj) {
			return true;
		}
		if (obj == null) {
			return false;
		}
		if (getClass() != obj.getClass()) {
			return false;
		}
		SensorTimeSeries other = (SensorTimeSeries) obj;
		if (timestepValuePairs == null) {
			if (other.timestepValuePairs != null) {
				return false;
			}
		} else if (!timestepValuePairs.equals(other.timestepValuePairs)) {
			return false;
		}
		return true;
	}

	@Override
	public String toString() {
		return "SensorTimeSeries [timestepValuePairs=" + timestepValuePairs + "]";
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/pdm
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/pdm/dataset/SensorTimeSeriesAttribute.java
package ai.libs.jaicore.ml.pdm.dataset;

import java.util.StringJoiner;

import org.api4.java.ai.ml.core.dataset.schema.attribute.IAttributeValue;
import org.api4.java.ai.ml.core.dataset.schema.attribute.IObjectAttribute;

import ai.libs.jaicore.ml.core.dataset.schema.attribute.AGenericObjectAttribute;
import ai.libs.jaicore.ml.core.dataset.schema.attribute.NumericAttribute;

/**
 * An attribute whose values are {@link SensorTimeSeries} objects. Values are
 * (de-)serialized as a whitespace-separated list of {@code timestep#value}
 * tokens enclosed in double quotes.
 */
public class SensorTimeSeriesAttribute extends AGenericObjectAttribute<SensorTimeSeries> implements IObjectAttribute<NumericAttribute> {

	private static final long serialVersionUID = 7375915385236514621L;

	private static final String SENSOR_TIME_SERIES_BORDER_FLAG = "\"";
	private static final String EMPTY_STRING = "";
	private static final String DATA_POINT_SEPARATOR = " ";
	private static final String SPLIT_MULTIPLE_WHITESPACES = "\\s+";
	private static final String TIMESTEP_VALUE_SEPARATOR = "#";

	public SensorTimeSeriesAttribute(final String name) {
		super(name);
	}

	@Override
	public boolean isValidValue(final Object value) {
		return (value instanceof SensorTimeSeries || value instanceof SensorTimeSeriesAttributeValue);
	}

	@Override
	public String getStringDescriptionOfDomain() {
		return "[TS] " + this.getName();
	}

	@Override
	public IAttributeValue getAsAttributeValue(final Object object) {
		if (object instanceof SensorTimeSeriesAttributeValue) {
			// re-wrap so the returned value is bound to this attribute
			return new SensorTimeSeriesAttributeValue(this, ((SensorTimeSeriesAttributeValue) object).getValue());
		} else if (object instanceof SensorTimeSeries) {
			return new SensorTimeSeriesAttributeValue(this, (SensorTimeSeries) object);
		}
		throw new IllegalArgumentException("No valid value for this attribute");
	}

	@Override
	public double toDouble(final Object object) {
		throw new UnsupportedOperationException("Not yet implemented in SensorTimeSeriesAttribute");
	}

	/**
	 * {@inheritDoc} Returns format: "t1#v1 t2#v2 ... tn#vn" (the separator between
	 * timestep and value is {@code #}, data points are separated by spaces, and the
	 * whole string is enclosed in double quotes).
	 */
	@Override
	public String serializeAttributeValue(final Object value) {
		StringJoiner sj = new StringJoiner(DATA_POINT_SEPARATOR);
		SensorTimeSeries sensorTimeSeries = (SensorTimeSeries) value;
		for (int t = 0; t <= sensorTimeSeries.getLength(); t++) {
			// cache the lookup: getValueOrNull performs a linear scan, so calling it
			// twice per timestep (as before) doubled the work
			Double valueAtT = sensorTimeSeries.getValueOrNull(t);
			if (valueAtT != null) {
				sj.add(t + TIMESTEP_VALUE_SEPARATOR + valueAtT);
			}
		}
		return SENSOR_TIME_SERIES_BORDER_FLAG + sj.toString() + SENSOR_TIME_SERIES_BORDER_FLAG;
	}

	/**
	 * {@inheritDoc} Given format: "t1#v1 t2#v2 ... tn#vn", optionally enclosed in
	 * double quotes.
	 */
	@Override
	public SensorTimeSeries deserializeAttributeValue(String string) {
		string = string.replace(SENSOR_TIME_SERIES_BORDER_FLAG, EMPTY_STRING);
		String[] splittedString = string.split(SPLIT_MULTIPLE_WHITESPACES);
		SensorTimeSeries sensorTimeSeries = new SensorTimeSeries();
		for (int i = 0; i < splittedString.length; i++) {
			String[] dataPoint = splittedString[i].split(TIMESTEP_VALUE_SEPARATOR);
			sensorTimeSeries.addValue(Integer.parseInt(dataPoint[0]), Double.parseDouble(dataPoint[1]));
		}
		return sensorTimeSeries;
	}

	@Override
	protected SensorTimeSeries getValueAsTypeInstance(final Object object) {
		if (this.isValidValue(object)) {
			if (object instanceof SensorTimeSeriesAttributeValue) {
				return ((SensorTimeSeriesAttributeValue) object).getValue();
			} else {
				return (SensorTimeSeries) object;
			}
		}
		throw new IllegalArgumentException("No valid value for this attribute");
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/pdm
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/pdm/dataset/SensorTimeSeriesAttributeValue.java
package ai.libs.jaicore.ml.pdm.dataset; import org.api4.java.ai.ml.core.dataset.schema.attribute.IAttribute; import org.api4.java.ai.ml.core.dataset.schema.attribute.IObjectAttributeValue; public class SensorTimeSeriesAttributeValue implements IObjectAttributeValue<SensorTimeSeries> { private final SensorTimeSeriesAttribute attribute; private final SensorTimeSeries value; public SensorTimeSeriesAttributeValue(SensorTimeSeriesAttribute attribute, SensorTimeSeries value) { super(); this.attribute = attribute; this.value = value; } @Override public IAttribute getAttribute() { return attribute; } @Override public SensorTimeSeries getValue() { return value; } }
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/RankingPredictionBatch.java
package ai.libs.jaicore.ml.ranking;

import java.util.List;
import java.util.stream.Collectors;

import org.api4.java.ai.ml.core.evaluation.IPrediction;
import org.api4.java.ai.ml.ranking.IRanking;
import org.api4.java.ai.ml.ranking.IRankingPredictionBatch;

import ai.libs.jaicore.ml.core.evaluation.PredictionBatch;

/**
 * A batch of ranking predictions, backed by the generic {@link PredictionBatch}
 * and narrowing the prediction type to {@link IRanking}.
 */
public class RankingPredictionBatch extends PredictionBatch implements IRankingPredictionBatch {

	/**
	 * @param predictionBatch The rankings forming this batch.
	 */
	public RankingPredictionBatch(final List<IRanking<?>> predictionBatch) {
		super(predictionBatch.stream().map(IPrediction.class::cast).collect(Collectors.toList()));
	}

	/**
	 * @param predictionBatch The rankings forming this batch.
	 */
	public RankingPredictionBatch(final IRanking<?>[] predictionBatch) {
		super(predictionBatch);
	}

	@SuppressWarnings("unchecked")
	@Override
	public List<IRanking<?>> getPredictions() {
		// safe: both constructors only accept rankings
		return (List<IRanking<?>>) super.getPredictions();
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/package-info.java
/**
 * Package containing the ranking functionality of jaicore-ml.
 *
 * @author mwever
 */
package ai.libs.jaicore.ml.ranking;
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/IVectorDyad.java
package ai.libs.jaicore.ml.ranking.dyad;

import org.api4.java.ai.ml.ranking.dyad.dataset.IDyad;
import org.api4.java.common.math.IVector;

/**
 * A dyad whose context and alternative are both represented as {@link IVector}s,
 * narrowing the return types declared by {@link IDyad}.
 */
public interface IVectorDyad extends IDyad {

	/**
	 * @return The context of this dyad as a vector.
	 */
	@Override
	public IVector getContext();

	/**
	 * @return The alternative of this dyad as a vector.
	 */
	@Override
	public IVector getAlternative();
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/package-info.java
/**
 * Dyad ranking package, containing data types and learners for ranking dyads
 * (context/alternative pairs).
 *
 * @author mwever
 */
package ai.libs.jaicore.ml.ranking.dyad;
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/dataset/ADyadRankingInstance.java
package ai.libs.jaicore.ml.ranking.dyad.dataset;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

import org.api4.java.ai.ml.ranking.dyad.dataset.IDyad;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

import ai.libs.jaicore.ml.ranking.label.learner.clusterbased.customdatatypes.Ranking;

/**
 * Base class for dyad ranking instances. An instance has a single attribute
 * (position 0): the set of dyads; its label is a ranking over those dyads.
 */
public abstract class ADyadRankingInstance implements INDArrayDyadRankingInstance {

	/**
	 * Stacks the double-vector representations of all dyads of this instance into a matrix,
	 * one dyad per row.
	 */
	@Override
	public INDArray toMatrix() {
		List<INDArray> dyadList = new ArrayList<>(this.getNumberOfRankedElements());
		for (IDyad dyad : this) {
			INDArray dyadVector = Nd4j.create(dyad.toDoubleVector());
			dyadList.add(dyadVector);
		}
		INDArray dyadMatrix;
		dyadMatrix = Nd4j.vstack(dyadList);
		return dyadMatrix;
	}

	@Override
	public double[] getPoint() {
		throw new UnsupportedOperationException("Cannot create vector representation of ranking instance.");
	}

	@Override
	public void removeColumn(int columnPos) {
		// fix: the previous message ("Cannot create vector representation of ranking instance.")
		// was copied from getPoint() and did not describe this operation
		throw new UnsupportedOperationException("Cannot remove a column from a ranking instance.");
	}

	@Override
	public Object[] getAttributes() {
		// the only attribute (position 0) is the set of dyads
		return new Object[] { getAttributeValue(0) };
	}

	/** Throws if the given collection contains any element that is not an {@link IDyad}. */
	protected void assertOnlyDyadsInCollection(Collection<?> collection) {
		boolean noneDyadInCollection = collection.stream().anyMatch(o -> !(o instanceof IDyad));
		if (noneDyadInCollection) {
			throw new IllegalArgumentException("Cannot set collection with non-dyad element as attribute value.");
		}
	}

	/** Throws if the given collection is empty. */
	protected void assertNonEmptyCollection(Collection<?> collection) {
		if (collection.isEmpty()) {
			throw new IllegalArgumentException("Cannot work with empty collection.");
		}
	}

	@Override
	public abstract Set<IDyad> getAttributeValue(final int pos);

	/** @return The set of dyads of this instance (attribute position 0). */
	public Set<IDyad> getDyads() {
		return getAttributeValue(0);
	}

	/** Replaces the set of dyads of this instance. */
	public abstract void setDyads(Set<IDyad> dyads);

	/** Replaces the ranking (label) of this instance. */
	public abstract void setRanking(Ranking<IDyad> ranking);

	/**
	 * Sets the label of this instance. The given object must be a non-empty list of dyads
	 * that are all contained in this instance's dyad set.
	 *
	 * @param obj A {@code List} of {@link IDyad}s defining the ranking.
	 * @throws IllegalArgumentException If obj is not a list, is empty, contains non-dyads,
	 *             or contains dyads unknown to this instance.
	 */
	@Override
	public void setLabel(Object obj) {
		if (!(obj instanceof List)) {
			throw new IllegalArgumentException("Label " + obj + " is not of type List.");
		}
		List<?> list = (List<?>) obj;
		assertNonEmptyCollection(list);
		assertOnlyDyadsInCollection(list);
		Set<IDyad> dyads = getDyads();
		boolean unknownDyadInList = list.stream().anyMatch(o -> !(dyads.contains(o)));
		if (unknownDyadInList) {
			throw new IllegalArgumentException("Cannot set list with unknown dyad element as label.");
		}
		Ranking<IDyad> ranking = new Ranking<>(list.stream().map(o -> (IDyad) o).collect(Collectors.toList()));
		setRanking(ranking);
	}

	/**
	 * Sets the (only) attribute value, i.e. the set of dyads. The given value must be a
	 * non-empty collection of dyads. Note that {@code pos} is not validated here; sub-classes
	 * validate positions in {@link #getAttributeValue(int)}.
	 *
	 * @param pos The attribute position (expected to be 0).
	 * @param value A non-empty {@code Collection} of {@link IDyad}s.
	 */
	@Override
	public void setAttributeValue(int pos, Object value) {
		if (!(value instanceof Collection)) {
			throw new IllegalArgumentException("Attribute value " + value + " is not of type Collection.");
		}
		Collection<?> collection = (Collection<?>) value;
		assertNonEmptyCollection(collection);
		assertOnlyDyadsInCollection(collection);
		Set<IDyad> dyads = collection.stream().map(o -> (IDyad) o).collect(Collectors.toSet());
		setDyads(dyads);
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/dataset/AGeneralDatasetBackedDataset.java
package ai.libs.jaicore.ml.ranking.dyad.dataset;

import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;

import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance;

import ai.libs.jaicore.ml.core.dataset.Dataset;

/**
 * A {@link List} view on an internal {@link Dataset}: every list operation is
 * delegated to the wrapped dataset, with element types cast to {@code E}.
 *
 * NOTE(review): equals/hashCode are not overridden, so list equality is
 * identity-based rather than element-based as the List contract suggests —
 * confirm whether callers rely on this.
 */
public class AGeneralDatasetBackedDataset<E extends ILabeledInstance> implements List<E> {

	private Dataset dataset;

	public AGeneralDatasetBackedDataset() {
	}

	public AGeneralDatasetBackedDataset(final Dataset dataset) {
		this.dataset = dataset;
	}

	/** @return The wrapped dataset all operations are delegated to. */
	protected Dataset getInternalDataset() {
		return this.dataset;
	}

	/** Replaces the wrapped dataset. */
	protected void setInternalDataset(final Dataset internalDataset) {
		this.dataset = internalDataset;
	}

	@Override
	public int size() {
		return this.dataset.size();
	}

	@Override
	public boolean isEmpty() {
		return this.dataset.isEmpty();
	}

	@Override
	public boolean contains(final Object o) {
		return this.dataset.contains(o);
	}

	@SuppressWarnings("unchecked")
	@Override
	public Iterator<E> iterator() {
		return (Iterator<E>) this.dataset.iterator();
	}

	@Override
	public Object[] toArray() {
		return this.dataset.toArray();
	}

	@Override
	public <T> T[] toArray(final T[] a) {
		return this.dataset.toArray(a);
	}

	@Override
	public boolean add(final E e) {
		return this.dataset.add(e);
	}

	@Override
	public boolean remove(final Object o) {
		return this.dataset.remove(o);
	}

	@Override
	public boolean containsAll(final Collection<?> c) {
		return this.dataset.containsAll(c);
	}

	@Override
	public boolean addAll(final Collection<? extends E> c) {
		return this.dataset.addAll(c);
	}

	@Override
	public boolean addAll(final int index, final Collection<? extends E> c) {
		return this.dataset.addAll(index, c);
	}

	@Override
	public boolean removeAll(final Collection<?> c) {
		return this.dataset.removeAll(c);
	}

	@Override
	public boolean retainAll(final Collection<?> c) {
		return this.dataset.retainAll(c);
	}

	@Override
	public void clear() {
		this.dataset.clear();
	}

	@SuppressWarnings("unchecked")
	@Override
	public E get(final int index) {
		return (E) this.dataset.get(index);
	}

	@SuppressWarnings("unchecked")
	@Override
	public E set(final int index, final E element) {
		return (E) this.dataset.set(index, element);
	}

	@Override
	public void add(final int index, final E element) {
		this.dataset.add(index, element);
	}

	@SuppressWarnings("unchecked")
	@Override
	public E remove(final int index) {
		return (E) this.dataset.remove(index);
	}

	@Override
	public int indexOf(final Object o) {
		return this.dataset.indexOf(o);
	}

	@Override
	public int lastIndexOf(final Object o) {
		return this.dataset.lastIndexOf(o);
	}

	@SuppressWarnings("unchecked")
	@Override
	public ListIterator<E> listIterator() {
		return (ListIterator<E>) this.dataset.listIterator();
	}

	@SuppressWarnings("unchecked")
	@Override
	public ListIterator<E> listIterator(final int index) {
		return (ListIterator<E>) this.dataset.listIterator(index);
	}

	@SuppressWarnings("unchecked")
	@Override
	public List<E> subList(final int fromIndex, final int toIndex) {
		return (List<E>) this.dataset.subList(fromIndex, toIndex);
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/dataset/DenseDyadRankingInstance.java
package ai.libs.jaicore.ml.ranking.dyad.dataset;

import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

import org.api4.java.ai.ml.ranking.IRanking;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyad;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingInstance;

import ai.libs.jaicore.ml.ranking.label.learner.clusterbased.customdatatypes.Ranking;

/**
 * A dyad ranking instance holding its dyads in a set (attribute position 0)
 * and a ranking over these dyads as its label.
 *
 * @author Helena Graf
 * @author Alexander Tornede
 *
 */
public class DenseDyadRankingInstance extends ADyadRankingInstance {

	private Set<IDyad> dyads;
	private Ranking<IDyad> rankingOverDyads;

	/**
	 * Creates an instance from an unordered set of dyads; the ranking (label) starts empty.
	 *
	 * @param dyads The dyads of this instance.
	 */
	public DenseDyadRankingInstance(final Set<IDyad> dyads) {
		this.dyads = new HashSet<>(dyads);
		this.rankingOverDyads = new Ranking<>();
	}

	/**
	 * Creates an instance from an ordered list of dyads; the list order defines the ranking.
	 *
	 * @param dyads The dyads of this instance in ranking order.
	 */
	public DenseDyadRankingInstance(final List<IDyad> dyads) {
		this.dyads = new HashSet<>(dyads);
		this.rankingOverDyads = new Ranking<>(dyads);
	}

	@Override
	public Iterator<IDyad> iterator() {
		return this.dyads.iterator();
	}

	/**
	 * @param pos The attribute position; only 0 (the dyad set) is valid.
	 * @return An unmodifiable view of the dyad set.
	 * @throws IllegalArgumentException For any position other than 0.
	 */
	@Override
	public Set<IDyad> getAttributeValue(final int pos) {
		if (pos == 0) {
			return Collections.unmodifiableSet(this.dyads);
		}
		throw new IllegalArgumentException("No attribute at position " + pos + ".");
	}

	@Override
	public IRanking<IDyad> getLabel() {
		return this.rankingOverDyads;
	}

	@Override
	public int getNumberOfRankedElements() {
		return this.dyads.size();
	}

	/**
	 * Two ranking instances are equal iff they have the same dyad set (the only
	 * attribute, position 0) and the same ranking label.
	 */
	@Override
	public boolean equals(final Object o) {
		if (!(o instanceof IDyadRankingInstance)) {
			return false;
		}
		IDyadRankingInstance drInstance = (IDyadRankingInstance) o;
		// fix: the previous implementation looped getAttributeValue(i) for every ranked
		// element, but only position 0 is a valid attribute, so any instance with more
		// than one dyad made equals() throw an IllegalArgumentException
		return drInstance.getAttributeValue(0).equals(this.getAttributeValue(0)) && drInstance.getLabel().equals(this.getLabel());
	}

	@Override
	public int hashCode() {
		// consistent with equals: equal instances have equal dyad sets and thus equal hashes
		int result = 42;
		result = result * 31 + this.dyads.hashCode();
		return result;
	}

	@Override
	public String toString() {
		StringBuilder builder = new StringBuilder();
		builder.append("DyadRankingInstance: ");
		builder.append(this.dyads);
		return builder.toString();
	}

	@Override
	public void setDyads(final Set<IDyad> dyads) {
		this.dyads = dyads;
	}

	@Override
	public void setRanking(final Ranking<IDyad> ranking) {
		this.rankingOverDyads = new Ranking<>(ranking);
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/dataset/DyadRankingDataset.java
package ai.libs.jaicore.ml.ranking.dyad.dataset;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;

import org.apache.commons.io.IOUtils;
import org.apache.commons.io.LineIterator;
import org.api4.java.ai.ml.core.dataset.IDataset;
import org.api4.java.ai.ml.core.dataset.schema.ILabeledInstanceSchema;
import org.api4.java.ai.ml.core.dataset.schema.attribute.IAttribute;
import org.api4.java.ai.ml.core.exception.DatasetCreationException;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyad;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingDataset;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingInstance;
import org.api4.java.common.math.IVector;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ai.libs.jaicore.math.linearalgebra.DenseDoubleVector;
import ai.libs.jaicore.ml.core.dataset.Dataset;
import ai.libs.jaicore.ml.core.dataset.schema.LabeledInstanceSchema;
import ai.libs.jaicore.ml.core.dataset.schema.attribute.DyadRankingAttribute;
import ai.libs.jaicore.ml.core.dataset.schema.attribute.SetOfObjectsAttribute;
import ai.libs.jaicore.ml.ranking.dyad.learner.Dyad;

/**
 * A dataset representation for dyad ranking. Contains
 * {@link IDyadRankingInstance}s. In particular, this dataset is just an
 * extension to the {@link ArrayList} implementation with typecasts to
 * {@link IDyadRankingInstance}.
 *
 * @author Helena Graf, Mirko Jürgens, Michael Braun, Jonas Hanselle
 */
public class DyadRankingDataset extends AGeneralDatasetBackedDataset<IDyadRankingInstance> implements IDyadRankingDataset {

	private static final String MSG_REMOVAL_FORBIDDEN = "Cannot remove a column for dyad DyadRankingDataset.";

	private Logger logger = LoggerFactory.getLogger(DyadRankingDataset.class);

	/** The schema shared by all instances of this dataset (dyad set attribute + ranking label). */
	private LabeledInstanceSchema labeledInstanceSchema;

	public DyadRankingDataset() {
		this("");
	}

	public DyadRankingDataset(final String relationName) {
		this.createInstanceSchema(relationName);
		this.setInternalDataset(new Dataset(this.labeledInstanceSchema));
	}

	public DyadRankingDataset(final LabeledInstanceSchema labeledInstanceSchema) {
		this.labeledInstanceSchema = labeledInstanceSchema.getCopy();
		this.setInternalDataset(new Dataset(this.labeledInstanceSchema));
	}

	public DyadRankingDataset(final String relationName, final Collection<IDyadRankingInstance> c) {
		this(relationName);
		this.addAll(c);
	}

	public DyadRankingDataset(final Collection<IDyadRankingInstance> c) {
		this("", c);
	}

	/** Builds the fixed schema: one set-of-dyads attribute, a dyad-ranking label. */
	private void createInstanceSchema(final String relationName) {
		IAttribute dyadSetAttribute = new SetOfObjectsAttribute<>("dyads", IDyad.class);
		IAttribute dyadRankingAttribute = new DyadRankingAttribute("ranking");
		this.labeledInstanceSchema = new LabeledInstanceSchema(relationName, Arrays.asList(dyadSetAttribute), dyadRankingAttribute);
	}

	/**
	 * Writes this dataset to the given stream, one ranking per line: dyads are
	 * separated by '|', and within a dyad, context and alternative are separated by ';'.
	 * Currently, this always creates a dense dyad representation of the dataset.
	 *
	 * Note: the caller remains responsible for closing the stream. An {@link IOException}
	 * is logged (with stack trace) and otherwise swallowed, preserving best-effort semantics.
	 *
	 * @param out the stream to write to
	 */
	public void serialize(final OutputStream out) {
		try {
			for (IDyadRankingInstance instance : this) {
				for (IDyad dyad : instance) {
					/* Bug fix: use UTF-8 explicitly. deserialize() reads UTF-8, but the
					 * previous getBytes() used the platform default charset, breaking the
					 * round trip on non-UTF-8 platforms. */
					out.write(dyad.getContext().toString().getBytes(StandardCharsets.UTF_8));
					out.write(";".getBytes(StandardCharsets.UTF_8));
					out.write(dyad.getAlternative().toString().getBytes(StandardCharsets.UTF_8));
					out.write("|".getBytes(StandardCharsets.UTF_8));
				}
				out.write("\n".getBytes(StandardCharsets.UTF_8));
			}
		} catch (IOException e) {
			/* Log the full exception, not just the message, so the cause is not lost. */
			this.logger.warn("Could not serialize dyad ranking dataset.", e);
		}
	}

	/**
	 * Reads dyad rankings (in the format produced by {@link #serialize(OutputStream)})
	 * from the given UTF-8 stream, replacing the current contents of this dataset.
	 * Currently, this always creates a dense dyad ranking dataset. Reading stops at the
	 * first empty line.
	 *
	 * @param in the stream to read from
	 */
	public void deserialize(final InputStream in) {
		this.clear();
		try {
			LineIterator input = IOUtils.lineIterator(in, StandardCharsets.UTF_8);
			while (input.hasNext()) {
				String row = input.next();
				if (row.isEmpty()) {
					break;
				}
				List<IDyad> dyads = new LinkedList<>();
				String[] dyadTokens = row.split("\\|");
				for (String dyadString : dyadTokens) {
					String[] values = dyadString.split(";");
					if (values[0].length() > 1 && values[1].length() > 1) {
						/* Strip the surrounding brackets of the vector's toString() form
						 * before splitting into the individual components. */
						String[] instanceValues = values[0].substring(1, values[0].length() - 1).split(",");
						String[] alternativeValues = values[1].substring(1, values[1].length() - 1).split(",");
						IVector instance = new DenseDoubleVector(instanceValues.length);
						for (int i = 0; i < instanceValues.length; i++) {
							instance.setValue(i, Double.parseDouble(instanceValues[i]));
						}
						IVector alternative = new DenseDoubleVector(alternativeValues.length);
						for (int i = 0; i < alternativeValues.length; i++) {
							alternative.setValue(i, Double.parseDouble(alternativeValues[i]));
						}
						IDyad dyad = new Dyad(instance, alternative);
						dyads.add(dyad);
					}
				}
				this.add(new DenseDyadRankingInstance(dyads));
			}
		} catch (IOException e) {
			/* Log the full exception, not just the message, so the cause is not lost. */
			this.logger.warn("Could not deserialize dyad ranking dataset.", e);
		}
	}

	@Override
	public boolean equals(final Object o) {
		if (!(o instanceof DyadRankingDataset)) {
			return false;
		}
		DyadRankingDataset dataset = (DyadRankingDataset) o;
		if (dataset.size() != this.size()) {
			return false;
		}
		/* Element-wise comparison in dataset order. */
		for (int i = 0; i < dataset.size(); i++) {
			IDyadRankingInstance i1 = this.get(i);
			IDyadRankingInstance i2 = dataset.get(i);
			if (!i1.equals(i2)) {
				return false;
			}
		}
		return true;
	}

	@Override
	public int hashCode() {
		int result = 17;
		for (IDyadRankingInstance instance : this) {
			result = result * 31 + instance.hashCode();
		}
		return result;
	}

	/**
	 * Converts this data set to a list of ND4j {@link INDArray}s.
	 * Each dyad ranking is represented by a 2D-matrix where a row is a dyad.
	 *
	 * @return one matrix per ranking, in dataset order
	 */
	public List<INDArray> toND4j() {
		List<INDArray> ndList = new ArrayList<>();
		for (IDyadRankingInstance instance : this) {
			ndList.add(this.dyadRankingToMatrix(instance));
		}
		return ndList;
	}

	/**
	 * Converts a dyad to a {@link INDArray} row vector consisting of a
	 * concatenation of the instance (context) and alternative features.
	 *
	 * @param dyad The dyad to convert.
	 * @return The dyad in {@link INDArray} row vector form.
	 */
	private INDArray dyadToVector(final IDyad dyad) {
		INDArray instanceOfDyad = Nd4j.create(dyad.getContext().asArray());
		INDArray alternativeOfDyad = Nd4j.create(dyad.getAlternative().asArray());
		return Nd4j.hstack(instanceOfDyad, alternativeOfDyad);
	}

	/**
	 * Converts a dyad ranking to a {@link INDArray} matrix where each row
	 * corresponds to a dyad.
	 *
	 * @param drInstance The dyad ranking to convert to a matrix.
	 * @return The dyad ranking in {@link INDArray} matrix form.
	 */
	private INDArray dyadRankingToMatrix(final IDyadRankingInstance drInstance) {
		List<INDArray> dyadList = new ArrayList<>(drInstance.getNumberOfRankedElements());
		for (IDyad dyad : drInstance) {
			dyadList.add(this.dyadToVector(dyad));
		}
		return Nd4j.vstack(dyadList);
	}

	@Override
	public ILabeledInstanceSchema getInstanceSchema() {
		return this.labeledInstanceSchema;
	}

	@Override
	public Object[] getLabelVector() {
		return this.getInternalDataset().getLabelVector();
	}

	@Override
	public DyadRankingDataset createEmptyCopy() {
		return new DyadRankingDataset(this.labeledInstanceSchema);
	}

	@Override
	public Object[][] getFeatureMatrix() {
		return this.getInternalDataset().getFeatureMatrix();
	}

	@Override
	public void removeColumn(final int columnPos) {
		throw new UnsupportedOperationException(MSG_REMOVAL_FORBIDDEN);
	}

	@Override
	public void removeColumn(final String columnName) {
		throw new UnsupportedOperationException(MSG_REMOVAL_FORBIDDEN);
	}

	@Override
	public void removeColumn(final IAttribute attribute) {
		throw new UnsupportedOperationException(MSG_REMOVAL_FORBIDDEN);
	}

	/**
	 * Creates a shallow copy of this dataset: the instances themselves are shared,
	 * only the container is new.
	 */
	@Override
	public IDataset<IDyadRankingInstance> createCopy() throws DatasetCreationException, InterruptedException {
		DyadRankingDataset copy = this.createEmptyCopy();
		for (IDyadRankingInstance i : this) {
			copy.add(i);
		}
		return copy;
	}
}
0
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/dataset/INDArrayDyadRankingInstance.java
package ai.libs.jaicore.ml.ranking.dyad.dataset;

import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingInstance;
import org.nd4j.linalg.api.ndarray.INDArray;

/**
 * A dyad ranking instance that can additionally be represented as an ND4j matrix.
 */
public interface INDArrayDyadRankingInstance extends IDyadRankingInstance {

	/**
	 * Converts this dyad ranking to an {@link INDArray} matrix in which each row
	 * corresponds to one dyad.
	 *
	 * @return the dyad ranking in {@link INDArray} matrix form
	 */
	INDArray toMatrix();
}