index
int64 | repo_id
string | file_path
string | content
string |
|---|---|---|---|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/dataset/SparseDyadRankingInstance.java
|
package ai.libs.jaicore.ml.ranking.dyad.dataset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.stream.Collectors;
import org.api4.java.ai.ml.ranking.IRanking;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyad;
import org.api4.java.common.math.IVector;
import ai.libs.jaicore.ml.ranking.dyad.learner.Dyad;
import ai.libs.jaicore.ml.ranking.label.learner.clusterbased.customdatatypes.Ranking;
/**
 * A dyad ranking instance implementation that assumes the same instance for all
 * dyads contained in its ordering. It saves the instance and alternatives
 * separately and constructs dyads from them on request.
 *
 * @author Helena Graf, Mirko Jürgens
 *
 */
public class SparseDyadRankingInstance extends ADyadRankingInstance {

	/** The shared instance ('x') features of every dyad in this ranking instance. */
	private IVector context;

	/** The alternative ('y') feature vectors, without ordering information. */
	private Set<IVector> alternatives;

	/** The ranking over the alternatives; empty if no ordering has been provided. */
	private Ranking<IVector> rankedAlternatives;

	/**
	 * Constructs a sparse dyad ranking instance without any ordering information
	 * over the alternatives. The label returned by {@link #getLabel()} will be an
	 * empty ranking until {@link #setRanking(Ranking)} is called.
	 *
	 * @param context the shared instance features
	 * @param alternatives the unordered alternatives
	 */
	public SparseDyadRankingInstance(final IVector context, final Set<IVector> alternatives) {
		this.context = context;
		this.alternatives = new HashSet<>(alternatives);
		this.rankedAlternatives = new Ranking<>();
	}

	/**
	 * Constructs a sparse dyad ranking instance whose alternatives are ranked
	 * according to the order of the given list.
	 *
	 * @param context the shared instance features
	 * @param alternatives the alternatives, in ranking order
	 */
	public SparseDyadRankingInstance(final IVector context, final List<IVector> alternatives) {
		this.context = context;
		this.alternatives = new HashSet<>(alternatives);
		this.rankedAlternatives = new Ranking<>(alternatives);
	}

	/**
	 * Returns the set of dyads (the context paired with each alternative), which
	 * is the only attribute of this instance.
	 *
	 * @param position must be 0, since there is only one attribute
	 * @throws IllegalArgumentException if position is not 0
	 */
	@Override
	public Set<IDyad> getAttributeValue(final int position) {
		if (position == 0) {
			return new HashSet<>(this.alternatives.stream().map(y -> new Dyad(this.context, y)).collect(Collectors.toList()));
		}
		throw new IllegalArgumentException("No attribute at position " + position + ".");
	}

	/**
	 * Returns the ranking over the dyads, constructed on the fly from the context
	 * and the ranked alternatives. Empty if no ordering was ever provided.
	 */
	@Override
	public IRanking<IDyad> getLabel() {
		return new Ranking<>(this.rankedAlternatives.stream().map(y -> new Dyad(this.context, y)).collect(Collectors.toList()));
	}

	@Override
	public Iterator<IDyad> iterator() {
		return new Iterator<IDyad>() {
			private int index = 0;
			// snapshot of the dyads; iteration order over alternatives is unspecified (HashSet-backed)
			private List<IDyad> dyads = new ArrayList<>(SparseDyadRankingInstance.this.getAttributeValue(0));

			@Override
			public boolean hasNext() {
				return this.index < SparseDyadRankingInstance.this.getNumberOfRankedElements();
			}

			@Override
			public IDyad next() {
				if (!this.hasNext()) {
					throw new NoSuchElementException();
				}
				return this.dyads.get(this.index++);
			}
		};
	}

	public IVector getContext() {
		return this.context;
	}

	@Override
	public String toString() {
		StringBuilder builder = new StringBuilder();
		builder.append("SparseDyadRankingInstance: ");
		builder.append(System.lineSeparator());
		builder.append("Instance: ");
		builder.append(this.context);
		builder.append(System.lineSeparator());
		builder.append("Alternatives: ");
		builder.append(this.alternatives);
		return builder.toString();
	}

	@Override
	public int hashCode() {
		final int prime = 31;
		int result = 1;
		result = prime * result + ((this.alternatives == null) ? 0 : this.alternatives.hashCode());
		result = prime * result + ((this.context == null) ? 0 : this.context.hashCode());
		return result;
	}

	@Override
	public boolean equals(final Object obj) {
		if (this == obj) {
			return true;
		}
		// NOTE(review): relies on super.equals; if ADyadRankingInstance does not override
		// equals, this degenerates to identity comparison — confirm against the superclass.
		if (!super.equals(obj)) {
			return false;
		}
		if (this.getClass() != obj.getClass()) {
			return false;
		}
		SparseDyadRankingInstance other = (SparseDyadRankingInstance) obj;
		if (this.alternatives == null) {
			if (other.alternatives != null) {
				return false;
			}
		} else if (!this.alternatives.equals(other.alternatives)) {
			return false;
		}
		if (this.context == null) {
			if (other.context != null) {
				return false;
			}
		} else if (!this.context.equals(other.context)) {
			return false;
		}
		return true;
	}

	@Override
	public int getNumberOfRankedElements() {
		return this.alternatives.size();
	}

	/**
	 * Replaces the dyads of this instance. All dyads must share the same context.
	 * Any previously stored ranking is cleared, since the given set carries no
	 * ordering information.
	 *
	 * @param dyads the new (non-empty) set of dyads
	 * @throws IllegalArgumentException if the set is empty or contexts differ
	 */
	@Override
	public void setDyads(final Set<IDyad> dyads) {
		this.assertThatAllContextsAreIdentical(dyads);
		this.context = dyads.iterator().next().getContext();
		this.alternatives = dyads.stream().map(IDyad::getAlternative).collect(Collectors.toSet());
		// the old ranking refers to the old alternatives and must not survive
		this.rankedAlternatives = new Ranking<>();
	}

	/**
	 * Sets the ranking over the dyads. All dyads must share the same context.
	 *
	 * Bug fix: previously the given ranking was validated but silently discarded,
	 * so this setter was a no-op.
	 *
	 * @param ranking the new (non-empty) ranking of dyads
	 * @throws IllegalArgumentException if the ranking is empty or contexts differ
	 */
	@Override
	public void setRanking(final Ranking<IDyad> ranking) {
		this.assertThatAllContextsAreIdentical(ranking);
		this.rankedAlternatives = new Ranking<>(ranking.stream().map(IDyad::getAlternative).collect(Collectors.toList()));
	}

	/**
	 * Validates the sparseness invariant: every dyad in the collection must carry
	 * the same context.
	 *
	 * @param dyads the dyads to check
	 * @throws IllegalArgumentException if the collection is empty or contexts differ
	 */
	private void assertThatAllContextsAreIdentical(final Collection<IDyad> dyads) {
		if (dyads.isEmpty()) {
			// previously this threw an undocumented NoSuchElementException
			throw new IllegalArgumentException("A sparse dyad ranking instance requires at least one dyad.");
		}
		IDyad anyDyad = dyads.iterator().next();
		boolean allContextsIdentical = dyads.stream().allMatch(d -> d.getContext().equals(anyDyad.getContext()));
		if (!allContextsIdentical) {
			throw new IllegalArgumentException("For a sparse dyad ranking instance, all contexts have to be identical.");
		}
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/Dyad.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyad;
import org.api4.java.common.math.IVector;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
/**
 * Represents a dyad consisting of an instance and an alternative, represented
 * by feature vectors.
 *
 * @author Helena Graf
 *
 */
public class Dyad implements IDyad {

	/** The 'x' value of the dyad (instance/context features). */
	private IVector context;

	/** The 'y' value of the dyad (alternative features). */
	private IVector alternative;

	/**
	 * Construct a new dyad consisting of the given instance and alternative.
	 *
	 * @param instance the instance (context) feature vector
	 * @param alternative the alternative feature vector
	 */
	public Dyad(final IVector instance, final IVector alternative) {
		this.context = instance;
		this.alternative = alternative;
	}

	/**
	 * Get the instance (context) features.
	 *
	 * @return the instance features
	 */
	@Override
	public IVector getContext() {
		return this.context;
	}

	/**
	 * Get the alternative features.
	 *
	 * @return the alternative features
	 */
	@Override
	public IVector getAlternative() {
		return this.alternative;
	}

	@Override
	public String toString() {
		StringBuilder builder = new StringBuilder();
		builder.append("Dyad (");
		builder.append("instance (");
		builder.append(this.context);
		builder.append(")");
		builder.append("alternative (");
		builder.append(this.alternative);
		builder.append(")");
		builder.append(")");
		return builder.toString();
	}

	/**
	 * Two dyads are equal iff both fields are null in both dyads, or all four
	 * fields are non-null and pairwise equal. A dyad with exactly one null field
	 * is never equal to anything (this mirrors the original semantics, where the
	 * mixed-null case always fell through to false).
	 */
	@Override
	public boolean equals(final Object o) {
		if (!(o instanceof Dyad)) {
			return false;
		}
		Dyad other = (Dyad) o;
		if (this.context == null && other.context == null && this.alternative == null && other.alternative == null) {
			return true;
		}
		if (this.context == null || other.context == null || this.alternative == null || other.alternative == null) {
			return false;
		}
		return this.context.equals(other.context) && this.alternative.equals(other.alternative);
	}

	/**
	 * Null-safe hash code, consistent with {@link #equals(Object)}.
	 *
	 * Bug fix: previously a null context or alternative caused a
	 * NullPointerException although equals explicitly permits null fields. Hash
	 * values for fully non-null dyads are unchanged.
	 */
	@Override
	public int hashCode() {
		int result = 42;
		result = result * 31 + (this.context == null ? 0 : this.context.hashCode());
		result = result * 31 + (this.alternative == null ? 0 : this.alternative.hashCode());
		return result;
	}

	/**
	 * Converts a dyad to a {@link INDArray} row vector consisting of a
	 * concatenation of the instance and alternative features.
	 *
	 * @return The dyad in {@link INDArray} row vector form.
	 */
	public INDArray toVector() {
		INDArray instanceOfDyad = Nd4j.create(this.getContext().asArray());
		INDArray alternativeOfDyad = Nd4j.create(this.getAlternative().asArray());
		return Nd4j.hstack(instanceOfDyad, alternativeOfDyad);
	}

	/**
	 * Converts this dyad to a primitive double array consisting of the instance
	 * features followed by the alternative features.
	 *
	 * Bug fix: the previous implementation passed the {@link IVector} object
	 * itself as the arraycopy destination (failing at runtime) and never copied
	 * the alternative features at all, so callers got an all-zero array at best.
	 *
	 * @return the concatenated instance and alternative features
	 */
	@Override
	public double[] toDoubleVector() {
		double[] array = new double[this.getContext().length() + this.getAlternative().length()];
		System.arraycopy(this.getContext().asArray(), 0, array, 0, this.getContext().length());
		System.arraycopy(this.getAlternative().asArray(), 0, array, this.getContext().length(), this.getAlternative().length());
		return array;
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/activelearning/ARandomlyInitializingDyadRanker.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.activelearning;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Random;
import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
import org.api4.java.ai.ml.core.exception.TrainingException;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyad;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingInstance;
import org.api4.java.common.math.IVector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ai.libs.jaicore.ml.ranking.dyad.dataset.DyadRankingDataset;
import ai.libs.jaicore.ml.ranking.dyad.dataset.SparseDyadRankingInstance;
import ai.libs.jaicore.ml.ranking.dyad.learner.algorithm.PLNetDyadRanker;
/**
 * Base class for pool-based active dyad rankers that warm up the model with a
 * number of purely random pairwise queries before the concrete sampling
 * strategy (implemented in {@link #activelyTrainWithOneInstance()}) takes over.
 */
public abstract class ARandomlyInitializingDyadRanker extends ActiveDyadRanker {

	private final Logger logger = LoggerFactory.getLogger(ARandomlyInitializingDyadRanker.class);

	/** Number of purely random queries conducted before the concrete strategy is used. */
	private final int numberRandomQueriesAtStart;

	/** Summary statistics of the predicted skill per dyad, updated on every ranker update. */
	private final Map<IDyad, SummaryStatistics> dyadStats;

	/** Snapshot of the instance features available in the pool. */
	private final List<IVector> instanceFeatures;

	/** Seeded source of randomness for reproducible query selection. */
	private final Random random;

	/** Number of queried rankings collected per model update. */
	private final int minibatchSize;

	/** Number of query steps conducted so far. */
	private int iteration;

	/**
	 * @param ranker the PLNet dyad ranker that is actively trained
	 * @param poolProvider provider of the dyad pool for selective sampling
	 * @param seed seed for the random number generator
	 * @param numberRandomQueriesAtStart number of random warm-up queries
	 * @param minibatchSize number of queried rankings per model update
	 */
	public ARandomlyInitializingDyadRanker(final PLNetDyadRanker ranker, final IDyadRankingPoolProvider poolProvider, final int seed, final int numberRandomQueriesAtStart, final int minibatchSize) {
		super(ranker, poolProvider);
		this.dyadStats = new HashMap<>();
		this.instanceFeatures = new ArrayList<>(poolProvider.getInstanceFeatures());
		this.numberRandomQueriesAtStart = numberRandomQueriesAtStart;
		this.minibatchSize = minibatchSize;
		this.iteration = 0;
		for (IVector instance : this.instanceFeatures) {
			for (IDyad dyad : poolProvider.getDyadsByInstance(instance)) {
				this.dyadStats.put(dyad, new SummaryStatistics());
			}
		}
		this.random = new Random(seed);
	}

	@Override
	public void activelyTrain(final int numberOfQueries) throws TrainingException, InterruptedException {
		for (int i = 0; i < numberOfQueries; i++) {
			if (this.iteration < this.numberRandomQueriesAtStart) {
				DyadRankingDataset minibatch = new DyadRankingDataset();
				for (int batchIndex = 0; batchIndex < this.minibatchSize; batchIndex++) {
					if (this.instanceFeatures.isEmpty()) {
						break;
					}
					// get random instance
					Collections.shuffle(this.instanceFeatures, this.random);
					IVector instance = this.instanceFeatures.get(0);
					// get random pair of dyads
					List<IDyad> dyads = new ArrayList<>(this.poolProvider.getDyadsByInstance(instance));
					if (dyads.size() < 2) {
						// cannot form a pairwise query for this instance; previously
						// dyads.get(1) threw an IndexOutOfBoundsException here
						continue;
					}
					Collections.shuffle(dyads, this.random);
					// query the first two dyads of the shuffled list
					LinkedList<IVector> alternatives = new LinkedList<>();
					alternatives.add(dyads.get(0).getAlternative());
					alternatives.add(dyads.get(1).getAlternative());
					SparseDyadRankingInstance queryInstance = new SparseDyadRankingInstance(dyads.get(0).getContext(), alternatives);
					IDyadRankingInstance trueRanking = this.poolProvider.query(queryInstance);
					minibatch.add(trueRanking);
				}
				// feed it to the ranker
				try {
					this.updateRanker(minibatch);
				} catch (TrainingException e) {
					this.logger.error("Updating the dyad ranking learner did not succeed.", e);
				}
			} else {
				this.activelyTrainWithOneInstance();
			}
			this.iteration++;
		}
	}

	public int getNumberRandomQueriesAtStart() {
		return this.numberRandomQueriesAtStart;
	}

	public int getIteration() {
		return this.iteration;
	}

	public Map<IDyad, SummaryStatistics> getDyadStats() {
		return this.dyadStats;
	}

	public List<IVector> getInstanceFeatures() {
		return this.instanceFeatures;
	}

	public Random getRandom() {
		return this.random;
	}

	public int getMinibatchSize() {
		return this.minibatchSize;
	}

	@Override
	public abstract void activelyTrainWithOneInstance() throws TrainingException, InterruptedException;

	/**
	 * Fits the ranker on the given minibatch and records the newly predicted
	 * skill of every pool dyad in its summary statistics.
	 *
	 * @param minibatch the queried rankings to train on
	 */
	public void updateRanker(final DyadRankingDataset minibatch) throws TrainingException, InterruptedException {
		this.ranker.fit(minibatch);
		// update variances (confidence); assumes every pool dyad was registered in
		// dyadStats by the constructor
		for (IVector inst : this.getInstanceFeatures()) {
			for (IDyad dyad : this.poolProvider.getDyadsByInstance(inst)) {
				double skill = this.ranker.getSkillForDyad(dyad);
				this.dyadStats.get(dyad).addValue(skill);
			}
		}
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/activelearning/ActiveDyadRanker.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.activelearning;
import org.api4.java.ai.ml.core.exception.TrainingException;
import ai.libs.jaicore.ml.ranking.dyad.learner.algorithm.PLNetDyadRanker;
/**
 * Abstract description of a pool-based active learning strategy for dyad
 * ranking.
 *
 * @author Jonas Hanselle
 *
 */
public abstract class ActiveDyadRanker {

	protected PLNetDyadRanker ranker;
	protected IDyadRankingPoolProvider poolProvider;

	/**
	 * @param ranker the {@link PLNetDyadRanker} that is actively trained
	 * @param poolProvider the {@link IDyadRankingPoolProvider} that provides a
	 *            pool for pool-based selective sampling
	 */
	public ActiveDyadRanker(final PLNetDyadRanker ranker, final IDyadRankingPoolProvider poolProvider) {
		this.ranker = ranker;
		this.poolProvider = poolProvider;
	}

	/**
	 * Actively trains the ranker by performing the given number of single-query
	 * training steps.
	 *
	 * @param numberOfQueries number of queries the ranker conducts
	 * @throws TrainingException if the underlying ranker cannot be updated
	 * @throws InterruptedException if the thread is interrupted while training
	 */
	public void activelyTrain(final int numberOfQueries) throws TrainingException, InterruptedException {
		int remainingQueries = numberOfQueries;
		while (remainingQueries > 0) {
			this.activelyTrainWithOneInstance();
			remainingQueries--;
		}
	}

	/** Conducts a single query step and updates the ranker accordingly. */
	public abstract void activelyTrainWithOneInstance() throws TrainingException, InterruptedException;

	public PLNetDyadRanker getRanker() {
		return this.ranker;
	}

	public void setRanker(final PLNetDyadRanker ranker) {
		this.ranker = ranker;
	}

	public IDyadRankingPoolProvider getPoolProvider() {
		return this.poolProvider;
	}

	public void setPoolProvider(final IDyadRankingPoolProvider poolProvider) {
		this.poolProvider = poolProvider;
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/activelearning/DyadDatasetPoolProvider.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.activelearning;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyad;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingDataset;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingInstance;
import org.api4.java.common.math.IVector;
import org.nd4j.linalg.primitives.Pair;
import ai.libs.jaicore.ml.ranking.dyad.dataset.DenseDyadRankingInstance;
import ai.libs.jaicore.ml.ranking.dyad.dataset.DyadRankingDataset;
import ai.libs.jaicore.ml.ranking.dyad.dataset.SparseDyadRankingInstance;
/**
 * A pool provider which is created out of a {@link DyadRankingDataset}. Each
 * {@link SparseDyadRankingInstance} or {@link DenseDyadRankingInstance} of the
 * {@link DyadRankingDataset} must represent a full ranking. Only queries of
 * rankings over the same instance features can be answered.
 *
 * @author Jonas Hanselle
 *
 */
public class DyadDatasetPoolProvider implements IDyadRankingPoolProvider {

	/** All pool dyads, keyed by their instance (context) feature vector. */
	private HashMap<IVector, Set<IDyad>> dyadsByInstances;

	/** All pool dyads, keyed by their alternative feature vector. */
	private HashMap<IVector, Set<IDyad>> dyadsByAlternatives;

	/** Ground-truth rankings, keyed by the context of their top-ranked dyad. */
	private HashMap<IVector, IDyadRankingInstance> dyadRankingsByInstances;

	/** Ground-truth rankings, keyed by the alternative of their top-ranked dyad. */
	private HashMap<IVector, IDyadRankingInstance> dyadRankingsByAlternatives;

	private List<IDyadRankingInstance> pool;
	private boolean removeDyadsWhenQueried;
	private HashSet<IDyadRankingInstance> queriedRankings;
	private int numberQueries;

	public DyadDatasetPoolProvider(final IDyadRankingDataset dataset) {
		this.numberQueries = 0;
		this.removeDyadsWhenQueried = false;
		this.dyadsByInstances = new HashMap<>();
		this.dyadsByAlternatives = new HashMap<>();
		this.dyadRankingsByInstances = new HashMap<>();
		this.dyadRankingsByAlternatives = new HashMap<>();
		this.pool = new ArrayList<>(dataset.size());
		for (IDyadRankingInstance instance : dataset) {
			this.addDyadRankingInstance(instance);
		}
		this.queriedRankings = new HashSet<>();
	}

	@Override
	public Collection<IDyadRankingInstance> getPool() {
		return this.pool;
	}

	/**
	 * Answers a query by sorting the queried dyads according to their position in
	 * the stored ground-truth ranking over the same instance features.
	 *
	 * @param queryInstance the (sparse) query ranking instance
	 * @return the ground-truth ranking over the queried dyads
	 * @throws IllegalArgumentException if the query is not a {@link SparseDyadRankingInstance}
	 */
	@Override
	public IDyadRankingInstance query(final IDyadRankingInstance queryInstance) {
		this.numberQueries++;
		if (!(queryInstance instanceof SparseDyadRankingInstance)) {
			throw new IllegalArgumentException("Currently only supports SparseDyadRankingInstances!");
		}
		SparseDyadRankingInstance drInstance = (SparseDyadRankingInstance) queryInstance;
		List<Pair<IDyad, Integer>> dyadPositionPairs = new ArrayList<>(drInstance.getNumberOfRankedElements());
		for (IDyad dyad : drInstance) {
			int position = this.getPositionInRankingByInstanceFeatures(dyad);
			dyadPositionPairs.add(new Pair<>(dyad, position));
		}
		// sort by ascending ground-truth position, i.e. descending utility
		Collections.sort(dyadPositionPairs, Comparator.comparing(Pair<IDyad, Integer>::getRight));
		List<IDyad> dyadList = new ArrayList<>(dyadPositionPairs.size());
		for (Pair<IDyad, Integer> pair : dyadPositionPairs) {
			dyadList.add(pair.getFirst());
		}
		IDyadRankingInstance trueRanking = new DenseDyadRankingInstance(dyadList);
		if (this.removeDyadsWhenQueried) {
			for (IDyad dyad : dyadList) {
				this.removeDyadFromPool(dyad);
			}
		}
		this.queriedRankings.add(trueRanking);
		return trueRanking;
	}

	@Override
	public Set<IDyad> getDyadsByInstance(final IVector instanceFeatures) {
		if (!this.dyadsByInstances.containsKey(instanceFeatures)) {
			return new HashSet<>();
		}
		return this.dyadsByInstances.get(instanceFeatures);
	}

	@Override
	public Set<IDyad> getDyadsByAlternative(final IVector alternativeFeatures) {
		if (!this.dyadsByAlternatives.containsKey(alternativeFeatures)) {
			return new HashSet<>();
		}
		return this.dyadsByAlternatives.get(alternativeFeatures);
	}

	/**
	 * Adds a {@link IDyadRankingInstance} instance to the pool and indexes its
	 * dyads by instance and alternative features.
	 *
	 * @param instance the full ranking to add
	 */
	private void addDyadRankingInstance(final IDyadRankingInstance instance) {
		// Add the dyad ranking instance to the pool
		this.pool.add(instance);
		// Index the ranking by the context/alternative of its top-ranked dyad
		this.dyadRankingsByInstances.put(instance.getLabel().get(0).getContext(), instance);
		this.dyadRankingsByAlternatives.put(instance.getLabel().get(0).getAlternative(), instance);
		for (IDyad dyad : instance) {
			// Index each dyad by its instance and alternative features
			this.dyadsByInstances.computeIfAbsent(dyad.getContext(), k -> new HashSet<>()).add(dyad);
			this.dyadsByAlternatives.computeIfAbsent(dyad.getAlternative(), k -> new HashSet<>()).add(dyad);
		}
	}

	/**
	 * Returns the position of a dyad in the ranking over the same instance
	 * features. Returns -1 if the ranking does not contain the dyad.
	 *
	 * Bug fix: previously the ranking's length (not -1) was returned when the
	 * dyad was contained in no ranking, contradicting this documented contract.
	 *
	 * @param dyad the dyad to locate
	 * @return position of the dyad in the ranking, -1 if the ranking does not
	 *         contain the dyad
	 */
	private int getPositionInRankingByInstanceFeatures(final IDyad dyad) {
		IDyadRankingInstance ranking = this.dyadRankingsByInstances.get(dyad.getContext());
		if (ranking == null) {
			return -1;
		}
		for (int pos = 0; pos < ranking.getNumberOfRankedElements(); pos++) {
			if (ranking.getLabel().get(pos).equals(dyad)) {
				return pos;
			}
		}
		return -1;
	}

	@Override
	public Collection<IVector> getInstanceFeatures() {
		return this.dyadsByInstances.keySet();
	}

	/**
	 * Removes a dyad from both indices; an index entry is dropped entirely once
	 * it holds fewer than two dyads (a single dyad can no longer form a pair).
	 */
	private void removeDyadFromPool(final IDyad dyad) {
		if (this.dyadsByInstances.containsKey(dyad.getContext())) {
			this.dyadsByInstances.get(dyad.getContext()).remove(dyad);
			if (this.dyadsByInstances.get(dyad.getContext()).size() < 2) {
				this.dyadsByInstances.remove(dyad.getContext());
			}
		}
		if (this.dyadsByAlternatives.containsKey(dyad.getAlternative())) {
			this.dyadsByAlternatives.get(dyad.getAlternative()).remove(dyad);
			if (this.dyadsByAlternatives.get(dyad.getAlternative()).size() < 2) {
				this.dyadsByAlternatives.remove(dyad.getAlternative());
			}
		}
	}

	@Override
	public void setRemoveDyadsWhenQueried(final boolean flag) {
		this.removeDyadsWhenQueried = flag;
	}

	@Override
	public int getPoolSize() {
		int size = 0;
		for (Set<IDyad> set : this.dyadsByInstances.values()) {
			size += set.size();
		}
		return size;
	}

	/**
	 * Returns the number of queries the pool provider has answered so far.
	 *
	 * @return Number of queries this pool provider has answered.
	 */
	public int getNumberQueries() {
		return this.numberQueries;
	}

	@Override
	public DyadRankingDataset getQueriedRankings() {
		return new DyadRankingDataset(new ArrayList<>(this.queriedRankings));
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/activelearning/IDyadRankingPoolProvider.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.activelearning;
import java.util.Collection;
import java.util.Set;
import org.api4.java.ai.ml.core.learner.active.IActiveLearningPoolProvider;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyad;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingInstance;
import org.api4.java.common.math.IVector;
import ai.libs.jaicore.ml.ranking.dyad.dataset.DyadRankingDataset;
import ai.libs.jaicore.ml.ranking.dyad.learner.Dyad;
/**
 * Interface for an active learning pool provider in the context of dyad
 * ranking. It offers access to the pool of dyads both by instance features and
 * alternative features.
 *
 * @author Jonas Hanselle
 *
 */
public interface IDyadRankingPoolProvider extends IActiveLearningPoolProvider<IDyadRankingInstance> {
	/**
	 * Returns the set of all {@link Dyad}s with the given {@link IVector} of
	 * instance features.
	 *
	 * @param instanceFeatures {@link IVector} of instance features.
	 * @return {@link Set} of dyads with the given {@link IVector} of instance
	 *         features.
	 */
	public Set<IDyad> getDyadsByInstance(IVector instanceFeatures);
	/**
	 * Returns the set of all {@link Dyad}s with the given {@link IVector} of
	 * alternative features.
	 *
	 * @param alternativeFeatures {@link IVector} of alternative features.
	 * @return {@link Set} of dyads with the given {@link IVector} of alternative
	 *         features.
	 */
	public Set<IDyad> getDyadsByAlternative(IVector alternativeFeatures);
	/**
	 * Returns a {@link Collection} that contains all instance features contained in
	 * the pool.
	 *
	 * @return A {@link Collection} that contains all instance features contained in
	 *         the pool.
	 */
	public Collection<IVector> getInstanceFeatures();
	/**
	 * Sets whether dyads are removed from the pool once they have been queried.
	 *
	 * @param flag true to remove queried dyads from the pool.
	 */
	public void setRemoveDyadsWhenQueried(boolean flag);
	/**
	 * Returns the total number of dyads currently contained in the pool.
	 *
	 * @return The current pool size.
	 */
	public int getPoolSize();
	/**
	 * Returns the rankings that have been queried from this pool provider so far.
	 *
	 * @return A {@link DyadRankingDataset} of all queried rankings.
	 */
	public DyadRankingDataset getQueriedRankings();
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/activelearning/PrototypicalPoolBasedActiveDyadRanker.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.activelearning;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.api4.java.ai.ml.core.exception.TrainingException;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyad;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingInstance;
import org.api4.java.common.math.IVector;
import org.nd4j.linalg.primitives.Pair;
import ai.libs.jaicore.ml.ranking.dyad.dataset.DyadRankingDataset;
import ai.libs.jaicore.ml.ranking.dyad.dataset.SparseDyadRankingInstance;
import ai.libs.jaicore.ml.ranking.dyad.learner.algorithm.PLNetDyadRanker;
/**
* A prototypical active dyad ranker based on the idea of uncertainty sampling.
* First a constant number of random queries is sampled at the beginning. Then
* the sampling strategy randomly selects a problem instance in each query step.
* Afterwards it selects those two alternatives for pairwise comparison, for
* which the difference of the skill values is minimal, as these are the pairs
* the Plackett Luce model is least certain about. This procedure is repeated a
* constant number of times to create a minibatch for updating the model.
*
* @author Jonas Hanselle
*
*/
public class PrototypicalPoolBasedActiveDyadRanker extends ARandomlyInitializingDyadRanker {
private ArrayList<IDyadRankingInstance> seenInstances;
private double ratioOfOldInstancesForMinibatch;
private int lengthOfTopRankingToConsider;
public PrototypicalPoolBasedActiveDyadRanker(final PLNetDyadRanker ranker, final IDyadRankingPoolProvider poolProvider, final int maxBatchSize, final int lengthOfTopRankingToConsider, final double ratioOfOldInstancesForMinibatch,
final int numberRandomQueriesAtStart, final int seed) {
super(ranker, poolProvider, seed, numberRandomQueriesAtStart, maxBatchSize);
this.seenInstances = new ArrayList<>(poolProvider.getPool().size());
this.ratioOfOldInstancesForMinibatch = ratioOfOldInstancesForMinibatch;
this.lengthOfTopRankingToConsider = lengthOfTopRankingToConsider;
}
@Override
public void activelyTrainWithOneInstance() throws TrainingException, InterruptedException {
// get the instance feature vector for which the top ranking has the lowest
// probability, d^star in the paper
DyadRankingDataset minibatch = new DyadRankingDataset();
List<Pair<IVector, Double>> dStarWithProbability = new ArrayList<>(this.getMinibatchSize());
for (IVector instanceFeatures : this.poolProvider.getInstanceFeatures()) {
dStarWithProbability.add(new Pair<>(instanceFeatures, 54d));
}
Collections.shuffle(dStarWithProbability);
int numberOfOldInstances = Integer.min((int) (this.ratioOfOldInstancesForMinibatch * this.getMinibatchSize()), this.seenInstances.size());
int numberOfNewInstances = this.getMinibatchSize() - numberOfOldInstances;
for (int batchIndex = 0; batchIndex < numberOfNewInstances; batchIndex++) {
IVector curDStar = dStarWithProbability.get(batchIndex).getFirst();
List<IDyad> dyads = new ArrayList<>(this.poolProvider.getDyadsByInstance(curDStar));
if (dyads.size() < 2) {
break;
}
IVector instance = dyads.get(0).getContext();
List<IVector> alternatives = new ArrayList<>(dyads.size());
for (IDyad dyad : dyads) {
alternatives.add(dyad.getAlternative());
}
SparseDyadRankingInstance queryRanking = new SparseDyadRankingInstance(instance, alternatives);
// get the alternatives pair for which the PLNet is most uncertain
IDyadRankingInstance queryPair = this.ranker.getPairWithLeastCertainty(queryRanking);
// convert to SparseDyadRankingInstance
List<IVector> alternativePair = new ArrayList<>(queryPair.getNumberOfRankedElements());
for (IDyad dyad : queryPair) {
alternativePair.add(dyad.getAlternative());
}
SparseDyadRankingInstance sparseQueryPair = new SparseDyadRankingInstance(queryPair.getLabel().get(0).getContext(), alternativePair);
// query the pool provider to get the ground truth ranking for the pair
IDyadRankingInstance groundTruthPair = this.poolProvider.query(sparseQueryPair);
this.seenInstances.add(groundTruthPair);
minibatch.add(groundTruthPair);
}
// Select a portion of random instances that have already been queried and add
// them to the minibatch
Collections.shuffle(this.seenInstances);
List<IDyadRankingInstance> oldInstances = this.seenInstances.subList(0, numberOfOldInstances);
minibatch.addAll(oldInstances);
this.updateRanker(minibatch);
}
public double getRatioOfOldInstancesForMinibatch() {
return this.ratioOfOldInstancesForMinibatch;
}
public void setRatioOfOldInstancesForMinibatch(final double ratioOfOldInstancesForMinibatch) {
this.ratioOfOldInstancesForMinibatch = ratioOfOldInstancesForMinibatch;
}
public int getLengthOfTopRankingToConsider() {
return this.lengthOfTopRankingToConsider;
}
public void setLengthOfTopRankingToConsider(final int lengthOfTopRankingToConsider) {
this.lengthOfTopRankingToConsider = lengthOfTopRankingToConsider;
}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/activelearning/RandomPoolBasedActiveDyadRanker.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.activelearning;
import ai.libs.jaicore.ml.ranking.dyad.learner.algorithm.PLNetDyadRanker;
/**
 * A random active dyad ranker. The sampling strategy picks a problem instance
 * at random and then picks two alternatives at random for pairwise comparison.
 * This is repeated for a constant number of times to create a minibatch for
 * updating the ranker. Implemented by running the random warm-up phase of
 * {@link ARandomlyInitializingDyadRanker} indefinitely.
 *
 * @author Jonas Hanselle
 *
 */
public class RandomPoolBasedActiveDyadRanker extends ARandomlyInitializingDyadRanker {

	/**
	 * @param ranker the PLNet dyad ranker that is actively trained
	 * @param poolProvider provider of the dyad pool for selective sampling
	 * @param maxBatchSize number of queried rankings per model update
	 * @param seed seed for the random number generator
	 */
	public RandomPoolBasedActiveDyadRanker(final PLNetDyadRanker ranker, final IDyadRankingPoolProvider poolProvider, final int maxBatchSize, final int seed) {
		// Integer.MAX_VALUE random queries: the random phase never ends
		super(ranker, poolProvider, seed, Integer.MAX_VALUE, maxBatchSize);
	}

	@Override
	public void activelyTrainWithOneInstance() {
		/* this is never called, since the random phase never ends */
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/activelearning/UCBPoolBasedActiveDyadRanker.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.activelearning;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import org.api4.java.ai.ml.core.exception.TrainingException;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyad;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingInstance;
import org.api4.java.common.math.IVector;
import org.nd4j.linalg.primitives.Pair;
import ai.libs.jaicore.ml.ranking.dyad.dataset.DyadRankingDataset;
import ai.libs.jaicore.ml.ranking.dyad.dataset.SparseDyadRankingInstance;
import ai.libs.jaicore.ml.ranking.dyad.learner.algorithm.PLNetDyadRanker;
/**
 * A prototypical active dyad ranker based on the UCB decision rule. During the
 * learning procedure, it keeps track over the standard deviation of the skill
 * values predicted for a dyad. First a constant number of random queries is
 * sampled at the beginning. Then the sampling strategy randomly selects problem
 * instances and picks the two dyads with largest skill + standard deviation for
 * pairwise comparison. On each query step, this is repeated a constant number
 * of times to create a minibatch.
 *
 * @author Jonas Hanselle
 *
 */
public class UCBPoolBasedActiveDyadRanker extends ARandomlyInitializingDyadRanker {

	public UCBPoolBasedActiveDyadRanker(final PLNetDyadRanker ranker, final IDyadRankingPoolProvider poolProvider, final int seed, final int numberRandomQueriesAtStart, final int minibatchSize) {
		super(ranker, poolProvider, seed, numberRandomQueriesAtStart, minibatchSize);
	}

	@Override
	public void activelyTrainWithOneInstance() throws TrainingException, InterruptedException {
		DyadRankingDataset minibatch = new DyadRankingDataset();
		for (int minibatchIndex = 0; minibatchIndex < this.getMinibatchSize(); minibatchIndex++) {
			// randomly choose dataset to sample from
			int index = this.getRandom().nextInt(this.getInstanceFeatures().size());
			IVector problemInstance = this.getInstanceFeatures().get(index);
			List<IDyad> dyads = new ArrayList<>(this.poolProvider.getDyadsByInstance(problemInstance));
			if (dyads.size() < 2) {
				// cannot form a pairwise query for this instance; previously
				// dyadsWithUCB.get(1) threw an IndexOutOfBoundsException here
				continue;
			}
			// compute the upper confidence bound (skill + empirical standard
			// deviation) for each dyad of this dataset
			List<Pair<IDyad, Double>> dyadsWithUCB = new ArrayList<>(dyads.size());
			for (IDyad dyad : dyads) {
				double skill = this.ranker.getSkillForDyad(dyad);
				double std = this.getDyadStats().get(dyad).getStandardDeviation();
				double ucb = skill + std;
				dyadsWithUCB.add(new Pair<>(dyad, ucb));
			}
			// query the two dyads with highest ucb (sort descending by ucb)
			dyadsWithUCB.sort(Comparator.comparing(Pair<IDyad, Double>::getRight, Comparator.reverseOrder()));
			IDyad d1 = dyadsWithUCB.get(0).getFirst();
			IDyad d2 = dyadsWithUCB.get(1).getFirst();
			List<IVector> alts = new ArrayList<>(2);
			alts.add(d1.getAlternative());
			alts.add(d2.getAlternative());
			SparseDyadRankingInstance sparseQueryPair = new SparseDyadRankingInstance(d1.getContext(), alts);
			IDyadRankingInstance groundTruthPair = this.poolProvider.query(sparseQueryPair);
			// add it to the minibatch
			minibatch.add(groundTruthPair);
		}
		// update the ranker
		this.updateRanker(minibatch);
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/algorithm/IDyadRanker.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.algorithm;
import org.api4.java.ai.ml.core.learner.ISupervisedLearner;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyad;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingDataset;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingInstance;
import org.api4.java.ai.ml.ranking.learner.IRanker;
/**
 * <p>
 * An abstract representation of a dyad ranker, i.e. a supervised learner that
 * predicts rankings over dyads (pairs of instance and alternative feature
 * vectors).
 *
 * <p>
 * "Label ranking is a specific type of preference learning problem, namely the
 * problem of learning a model that maps instances to rankings over a finite
 * set of predefined alternatives. Like in conventional classification, these
 * alternatives are identified by their name or label while not being
 * characterized in terms of any properties or features that could be
 * potentially useful for learning. In this paper, we consider a generalization
 * of the label ranking problem that we call dyad ranking. In dyad ranking, not
 * only the instances but also the alternatives are represented in terms of
 * attributes."
 *
 * <p>
 * Schaefer, D., & Huellermeier, E. (2018). Dyad ranking using Plackett--Luce
 * models based on joint feature representations. Machine Learning, 107(5),
 * 903–941. https://doi.org/10.1007/s10994-017-5694-9
 *
 * @author Helena Graf
 *
 */
public interface IDyadRanker extends ISupervisedLearner<IDyadRankingInstance, IDyadRankingDataset>, IRanker<IDyad, IDyadRankingInstance, IDyadRankingDataset> {
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/algorithm/IPLDyadRanker.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.algorithm;
/**
 * An abstract representation for a dyad ranker using Plackett-Luce models.
 *
 * @author Helena Graf
 *
 */
public interface IPLDyadRanker extends IDyadRanker {
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/algorithm/IPLNetDyadRankerConfiguration.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.algorithm;
import java.util.List;
import org.aeonbits.owner.Config.Sources;
import org.aeonbits.owner.Mutable;
@Sources({ "file:conf/plNet/plnet.properties" })
public interface IPLNetDyadRankerConfiguration extends Mutable {

	/**
	 * The learning rate for the gradient updater.
	 */
	public static final String K_PLNET_LEARNINGRATE = "plnet.learningrate";

	/**
	 * List of integers describing the architecture of the hidden layers. The i-th
	 * element represents the number of units in the i-th hidden layer.
	 */
	public static final String K_PLNET_HIDDEN_NODES = "plnet.hidden.nodes";

	/**
	 * The random seed to use.
	 */
	public static final String K_PLNET_SEED = "plnet.seed";

	/**
	 * The activation function for the hidden layers. For a list of supported
	 * functions, see <a href=
	 * "https://deeplearning4j.org/docs/latest/deeplearning4j-cheat-sheet#config-afn">https://deeplearning4j.org/docs/latest/deeplearning4j-cheat-sheet#config-afn</a>
	 */
	public static final String K_ACTIVATION_FUNCTION = "plnet.hidden.activation.function";

	/**
	 * The maximum number of epochs to be used during training, i.e. how many times
	 * the training algorithm should iterate through the entire training data set.
	 * Set to 0 for no limit apart from early stopping.
	 */
	public static final String K_MAX_EPOCHS = "plnet.epochs";

	/**
	 * The size of mini batches used during training.
	 */
	public static final String K_MINI_BATCH_SIZE = "plnet.minibatch.size";

	/**
	 * How often (in epochs) the validation error should be checked for early
	 * stopping.
	 */
	public static final String K_EARLY_STOPPING_INTERVAL = "plnet.early.stopping.interval";

	/**
	 * For how many epochs early stopping should wait until training is stopped if
	 * no improvement in the validation error is observed.
	 */
	public static final String K_EARLY_STOPPING_PATIENCE = "plnet.early.stopping.patience";

	/**
	 * The ratio of data used for training in early stopping. 1 - this ratio is used
	 * for testing.
	 */
	public static final String K_EARLY_STOPPING_TRAIN_RATIO = "plnet.early.stopping.train.ratio";

	/**
	 * Whether to retrain on the full training data after early stopping, using the same number of epochs
	 * the model was trained for before early stopping occurred.
	 */
	public static final String K_EARLY_STOPPING_RETRAIN = "plnet.early.stopping.retrain";

	/** @return the learning rate for the gradient updater (see {@link #K_PLNET_LEARNINGRATE}) */
	@Key(K_PLNET_LEARNINGRATE)
	@DefaultValue("0.1")
	public double plNetLearningRate();

	/** @return the hidden-layer sizes, one entry per layer (see {@link #K_PLNET_HIDDEN_NODES}) */
	@Key(K_PLNET_HIDDEN_NODES)
	@DefaultValue("8")
	public List<Integer> plNetHiddenNodes();

	/** @return the random seed (see {@link #K_PLNET_SEED}) */
	@Key(K_PLNET_SEED)
	@DefaultValue("42")
	public int plNetSeed();

	/** @return the name of the hidden-layer activation function (see {@link #K_ACTIVATION_FUNCTION}) */
	@Key(K_ACTIVATION_FUNCTION)
	@DefaultValue("SIGMOID")
	public String plNetActivationFunction();

	/** @return the maximum number of training epochs; 0 means no limit apart from early stopping */
	@Key(K_MAX_EPOCHS)
	@DefaultValue("25")
	public int plNetMaxEpochs();

	/** @return the mini-batch size used during training */
	@Key(K_MINI_BATCH_SIZE)
	@DefaultValue("4")
	public int plNetMiniBatchSize();

	/** @return how often (in epochs) the validation error is checked for early stopping */
	@Key(K_EARLY_STOPPING_INTERVAL)
	@DefaultValue("1")
	public int plNetEarlyStoppingInterval();

	/** @return the number of non-improving checks tolerated before training stops */
	@Key(K_EARLY_STOPPING_PATIENCE)
	@DefaultValue("10")
	public int plNetEarlyStoppingPatience();

	/** @return the fraction of the data used for training; the rest is the early-stopping validation set */
	@Key(K_EARLY_STOPPING_TRAIN_RATIO)
	@DefaultValue("0.8")
	public double plNetEarlyStoppingTrainRatio();

	/** @return whether to retrain on all data after early stopping, for the number of epochs found best */
	@Key(K_EARLY_STOPPING_RETRAIN)
	@DefaultValue("true")
	public boolean plNetEarlyStoppingRetrain();
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/algorithm/PLNetDyadRanker.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.algorithm;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import org.aeonbits.owner.ConfigFactory;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
import org.api4.java.ai.ml.core.dataset.IInstance;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance;
import org.api4.java.ai.ml.core.exception.PredictionException;
import org.api4.java.ai.ml.core.exception.TrainingException;
import org.api4.java.ai.ml.core.learner.IProbabilisticPredictor;
import org.api4.java.ai.ml.ranking.IRanking;
import org.api4.java.ai.ml.ranking.IRankingPredictionBatch;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyad;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingDataset;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingInstance;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration.ListBuilder;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.gradient.Gradient;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.weights.WeightInit;
import org.deeplearning4j.nn.workspace.LayerWorkspaceMgr;
import org.deeplearning4j.util.ModelSerializer;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.learning.config.Adam;
import org.nd4j.linalg.primitives.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ai.libs.jaicore.basic.FileUtil;
import ai.libs.jaicore.ml.core.learner.ASupervisedLearner;
import ai.libs.jaicore.ml.ranking.RankingPredictionBatch;
import ai.libs.jaicore.ml.ranking.dyad.dataset.ADyadRankingInstance;
import ai.libs.jaicore.ml.ranking.dyad.dataset.DenseDyadRankingInstance;
import ai.libs.jaicore.ml.ranking.dyad.dataset.DyadRankingDataset;
import ai.libs.jaicore.ml.ranking.dyad.learner.Dyad;
import ai.libs.jaicore.ml.ranking.label.learner.clusterbased.customdatatypes.Ranking;
/**
 * A dyad ranker based on a Plackett-Luce network (PLNet).
 *
 * All the provided algorithms are implementations of the PLModel introduced in
 * [1].
 *
 * [1] Schäfer, D., & Hüllermeier, E. (2018). Dyad ranking using Plackett--Luce
 * models based on joint feature representations. Machine Learning, 107(5),
 * 903–941. https://doi.org/10.1007/s10994-017-5694-9
 *
 * @author Helena Graf, Jonas Hanselle, Michael Braun
 *
 */
public class PLNetDyadRanker extends ASupervisedLearner<IDyadRankingInstance, IDyadRankingDataset, IRanking<IDyad>, IRankingPredictionBatch> implements IPLDyadRanker, IProbabilisticPredictor {

	private static final Logger log = LoggerFactory.getLogger(PLNetDyadRanker.class);

	/** The underlying network; lazily created once the dyad dimensionality is known. */
	private MultiLayerNetwork plNet;
	private IPLNetDyadRankerConfiguration configuration;
	/** Number of completed training epochs (used for the updater and for early-stopping retraining). */
	private int epoch;
	/** Number of applied parameter updates (passed to the DL4J updater). */
	private int iteration;

	/**
	 * Constructs a new {@link PLNetDyadRanker} using the default
	 * {@link IPLNetDyadRankerConfiguration}.
	 *
	 */
	public PLNetDyadRanker() {
		this.configuration = ConfigFactory.create(IPLNetDyadRankerConfiguration.class);
	}

	/**
	 * Constructs a new {@link PLNetDyadRanker} using the given
	 * {@link IPLNetDyadRankerConfiguration}.
	 *
	 * @param config
	 *            Configuration for the {@link PLNetDyadRanker}.
	 */
	public PLNetDyadRanker(final IPLNetDyadRankerConfiguration config) {
		this.configuration = config;
	}

	/**
	 * Splits the given training data into mini batches of the configured size and
	 * applies one parameter update per batch; a trailing partial batch is also used.
	 *
	 * @param drTrain training rankings, one {@link INDArray} matrix per ranking
	 */
	private void tryUpdatingWithMinibatch(final List<INDArray> drTrain) {
		int miniBatchSize = this.configuration.plNetMiniBatchSize();
		List<INDArray> miniBatch = new ArrayList<>(miniBatchSize);
		for (INDArray dyadRankingInstance : drTrain) {
			miniBatch.add(dyadRankingInstance);
			if (miniBatch.size() == miniBatchSize) {
				this.updateWithMinibatch(miniBatch);
				miniBatch.clear();
			}
		}
		if (!miniBatch.isEmpty()) {
			this.updateWithMinibatch(miniBatch);
			miniBatch.clear();
		}
	}

	/**
	 * Computes the gradient of the plNet's error function for a given ranking
	 * represented as a matrix (one dyad per row, ordered from best to worst). The
	 * returned gradient is already scaled by the updater. The update procedure is
	 * based on algorithm 2 in [1].
	 *
	 * @param dyadMatrix the ranking to compute the scaled gradient for
	 * @return the gradient, multiplied by the updater's learning rate
	 */
	private INDArray computeScaledGradient(final INDArray dyadMatrix) {
		int dyadRankingLength = dyadMatrix.rows();
		List<INDArray> activations = this.plNet.feedForward(dyadMatrix);
		INDArray output = activations.get(activations.size() - 1);
		output = output.transpose();
		INDArray deltaW = Nd4j.zeros(this.plNet.params().length());
		Gradient deltaWk = null;
		// backpropagation mutates the network's activations, so work on a clone
		MultiLayerNetwork plNetClone = this.plNet.clone();
		for (int k = 0; k < dyadRankingLength; k++) {
			// compute derivative of loss w.r.t. k
			plNetClone.setInput(dyadMatrix.getRow(k));
			plNetClone.feedForward(true, false);
			INDArray lossGradient = PLNetLoss.computeLossGradient(output, k);
			// compute backprop gradient for weight updates w.r.t. k
			Pair<Gradient, INDArray> p = plNetClone.backpropGradient(lossGradient, null);
			deltaWk = p.getFirst();
			this.plNet.getUpdater().update(this.plNet, deltaWk, this.iteration, this.epoch, 1, LayerWorkspaceMgr.noWorkspaces());
			deltaW.addi(deltaWk.gradient());
		}
		return deltaW;
	}

	/**
	 * Computes the gradient of the plNets' error function for a given instance. The
	 * returned gradient is already scaled by the updater. The update procedure is
	 * based on algorithm 2 in [1].
	 *
	 * @param instance
	 *            The instance to compute the scaled gradient for.
	 * @return The gradient for the given instance, multiplied by the updater's
	 *         learning rate.
	 */
	private INDArray computeScaledGradient(final IDyadRankingInstance instance) {
		// the matrix-based overload performs the identical computation; converting
		// here avoids duplicating the backpropagation loop
		return this.computeScaledGradient(this.dyadRankingToMatrix(instance));
	}

	/**
	 * Updates this {@link PLNetDyadRanker} based on a given mini batch of
	 * {@link INDArray}s representing dyad rankings.
	 *
	 * @param minibatch
	 *            A mini batch consisting of a {@link List} of {@link INDArray}.
	 */
	private void updateWithMinibatch(final List<INDArray> minibatch) {
		double actualMiniBatchSize = minibatch.size();
		INDArray cumulativeDeltaW = Nd4j.zeros(this.plNet.params().length());
		for (INDArray instance : minibatch) {
			cumulativeDeltaW.addi(this.computeScaledGradient(instance));
		}
		// apply the averaged gradient of the batch
		cumulativeDeltaW.muli(1 / actualMiniBatchSize);
		this.plNet.params().subi(cumulativeDeltaW);
		this.iteration++;
	}

	/**
	 * Updates this {@link PLNetDyadRanker} based on the given {@link IInstance},
	 * which needs to be an {@link IDyadRankingInstance}. The update procedure is
	 * based on algorithm 2 in [1].
	 *
	 * @param instance
	 *            The {@link IInstance} the update should be based on. Needs to be a
	 *            {@link IDyadRankingInstance}.
	 * @throws TrainingException
	 *             If something fails during the update process.
	 */
	public void update(final IDyadRankingInstance instance) throws TrainingException {
		if (this.plNet == null) {
			// lazily create the network once the dyad dimensionality is known
			int dyadSize = instance.getLabel().get(0).getContext().length() + instance.getLabel().get(0).getAlternative().length();
			this.plNet = this.createNetwork(dyadSize);
			this.plNet.init();
		}
		INDArray deltaW = this.computeScaledGradient(instance);
		this.plNet.params().subi(deltaW);
		this.iteration++;
	}

	/**
	 * Updates this {@link PLNetDyadRanker} with a single mini batch formed from the
	 * given set of rankings.
	 *
	 * @param instances the rankings forming the mini batch
	 * @throws TrainingException if an instance is not an {@link ADyadRankingInstance}
	 */
	public void update(final Set<IDyadRankingInstance> instances) throws TrainingException {
		List<INDArray> minibatch = new ArrayList<>(instances.size());
		for (IDyadRankingInstance instance : instances) {
			if (this.plNet == null) {
				int dyadSize = (instance.getLabel().get(0).getContext().length()) + (instance.getLabel().get(0).getAlternative().length());
				this.plNet = this.createNetwork(dyadSize);
				this.plNet.init();
			}
			if (!(instance instanceof ADyadRankingInstance)) {
				throw new TrainingException("Can only work with instances of type ADyadRankingInstance.");
			}
			minibatch.add(((ADyadRankingInstance) instance).toMatrix());
		}
		this.updateWithMinibatch(minibatch);
	}

	/**
	 * Computes the average error on a set of dyad rankings in terms on the negative
	 * log likelihood (NLL).
	 *
	 * @param drTest
	 *            Test data on which the error should be computed, given as a
	 *            {@link List} of {@link INDArray} matrices (one per ranking)
	 * @return Average error on the given test data
	 */
	private double computeAvgError(final List<INDArray> drTest) {
		DescriptiveStatistics stats = new DescriptiveStatistics();
		for (INDArray dyadRankingInstance : drTest) {
			INDArray outputs = this.plNet.output(dyadRankingInstance);
			outputs = outputs.transpose();
			double score = PLNetLoss.computeLoss(outputs).getDouble(0);
			stats.addValue(score);
		}
		return stats.getMean();
	}

	/**
	 * Creates a simple feed-forward {@link MultiLayerNetwork} that can be used as a
	 * PLNet for dyad-ranking.
	 *
	 * @param numInputs
	 *            The number of inputs to the network, i.e. the number of features
	 *            of a dyad.
	 * @return New {@link MultiLayerNetwork}
	 */
	private MultiLayerNetwork createNetwork(final int numInputs) {
		if (this.configuration.plNetHiddenNodes().isEmpty()) {
			throw new IllegalArgumentException("There must be at least one hidden layer specified in the config file!");
		}
		ListBuilder configBuilder = new NeuralNetConfiguration.Builder().seed(this.configuration.plNetSeed())
				// Gradient descent updater: Adam
				.updater(new Adam(this.configuration.plNetLearningRate())).list();
		// Build hidden layers
		String activation = this.configuration.plNetActivationFunction();
		int inputsFirstHiddenLayer = this.configuration.plNetHiddenNodes().get(0);
		configBuilder.layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(inputsFirstHiddenLayer).weightInit(WeightInit.SIGMOID_UNIFORM).activation(Activation.fromString(activation)).hasBias(true).build());
		List<Integer> hiddenNodes = this.configuration.plNetHiddenNodes();
		for (int i = 0; i < hiddenNodes.size() - 1; i++) {
			int numIn = hiddenNodes.get(i);
			int numOut = hiddenNodes.get(i + 1);
			configBuilder.layer(i + 1, new DenseLayer.Builder().nIn(numIn).nOut(numOut).weightInit(WeightInit.SIGMOID_UNIFORM).activation(Activation.fromString(activation)).hasBias(true).build());
		}
		// Build output layer. Since we are using an external error for training,
		// this is a regular layer instead of an OutputLayer
		configBuilder.layer(hiddenNodes.size(), new DenseLayer.Builder().nIn(hiddenNodes.get(hiddenNodes.size() - 1)).nOut(1).weightInit(WeightInit.UNIFORM).activation(Activation.IDENTITY).hasBias(true).build());
		MultiLayerConfiguration multiLayerConfig = configBuilder.build();
		return new MultiLayerNetwork(multiLayerConfig);
	}

	/**
	 * Converts a dyad to a {@link INDArray} row vector consisting of a
	 * concatenation of the instance and alternative features.
	 *
	 * @param dyad
	 *            The dyad to convert.
	 * @return The dyad in {@link INDArray} row vector form.
	 */
	private INDArray dyadToVector(final IDyad dyad) {
		INDArray instanceOfDyad = Nd4j.create(dyad.getContext().asArray());
		INDArray alternativeOfDyad = Nd4j.create(dyad.getAlternative().asArray());
		return Nd4j.hstack(instanceOfDyad, alternativeOfDyad);
	}

	/**
	 * Converts a dyad ranking to a {@link INDArray} matrix where each row
	 * corresponds to a dyad.
	 *
	 * @param drInstance
	 *            The dyad ranking to convert to a matrix.
	 * @return The dyad ranking in {@link INDArray} matrix form.
	 */
	private INDArray dyadRankingToMatrix(final IDyadRankingInstance drInstance) {
		List<INDArray> dyadList = new ArrayList<>(drInstance.getNumAttributes());
		for (IDyad dyad : drInstance) {
			INDArray dyadVector = this.dyadToVector(dyad);
			dyadList.add(dyadVector);
		}
		return Nd4j.vstack(dyadList);
	}

	/**
	 * Creates a simple feed-forward {@link MultiLayerNetwork} using the json
	 * representation of a {@link MultiLayerConfiguration} in the file .
	 *
	 * @param configFile
	 *            {@link File} containing the json representation of the
	 *            {@link MultiLayerConfiguration}
	 */
	public void createNetworkFromDl4jConfigFile(final File configFile) {
		String json = "";
		try {
			json = FileUtil.readFileAsString(configFile);
		} catch (IOException e) {
			log.error(e.getMessage());
		}
		MultiLayerConfiguration config = MultiLayerConfiguration.fromJson(json);
		MultiLayerNetwork network = new MultiLayerNetwork(config);
		this.plNet = network;
	}

	/**
	 * Save a trained model at a given file path. Note that the produced file is a
	 * zip file and a ".zip" ending is added.
	 *
	 * @param filePath
	 *            The file path to save to.
	 * @throws IOException
	 */
	public void saveModelToFile(final String filePath) throws IOException {
		if (this.plNet == null) {
			throw new IllegalStateException("Cannot save untrained model.");
		}
		File locationToSave = new File(filePath + ".zip");
		ModelSerializer.writeModel(this.plNet, locationToSave, true);
	}

	/**
	 * Restore a trained model from a given file path. Warning: does not check
	 * whether the loaded model is a valid PLNet or conforms to the configuration of
	 * the object.
	 *
	 * @param filePath
	 *            The file to load from.
	 * @throws IOException
	 */
	public void loadModelFromFile(final String filePath) throws IOException {
		MultiLayerNetwork restored = ModelSerializer.restoreMultiLayerNetwork(filePath);
		this.plNet = restored;
	}

	public MultiLayerNetwork getPlNet() {
		return this.plNet;
	}

	public int getEpoch() {
		return this.epoch;
	}

	/**
	 * Returns the pair of {@link Dyad}s for which the model is least certain, i.e.
	 * the pair of adjacent dyads (in predicted utility order) with the smallest
	 * utility gap.
	 *
	 * @param drInstance
	 *            Ranking for which certainty should be assessed.
	 * @return The pair of {@link Dyad}s for which the model is least certain.
	 */
	public IDyadRankingInstance getPairWithLeastCertainty(final IDyadRankingInstance drInstance) {
		if (drInstance.getNumAttributes() < 2) {
			throw new IllegalArgumentException("The query instance must contain at least 2 dyads!");
		}
		// sorted in descending order of utility; also lazily initializes the network
		List<Pair<IDyad, Double>> dyadUtilityPairs = this.getSortedDyadUtilityPairsForInstance(drInstance);
		int indexOfPairWithLeastCertainty = 0;
		double currentlyLowestCertainty = Double.MAX_VALUE;
		for (int i = 0; i < dyadUtilityPairs.size() - 1; i++) {
			double currentCertainty = Math.abs(dyadUtilityPairs.get(i).getRight() - dyadUtilityPairs.get(i + 1).getRight());
			if (currentCertainty < currentlyLowestCertainty) {
				currentlyLowestCertainty = currentCertainty;
				indexOfPairWithLeastCertainty = i;
			}
		}
		List<IDyad> leastCertainDyads = new LinkedList<>();
		leastCertainDyads.add(dyadUtilityPairs.get(indexOfPairWithLeastCertainty).getLeft());
		leastCertainDyads.add(dyadUtilityPairs.get(indexOfPairWithLeastCertainty + 1).getLeft());
		return new DenseDyadRankingInstance(leastCertainDyads);
	}

	/**
	 * Returns the probablity of the top ranking for a given
	 * {@link IDyadRankingInstance} under the Plackett Luce model parametrized by
	 * the latent skill values predicted by the PLNet. This may be useful as the
	 * probability of a particular ranking diminishes drastically with increasing
	 * length of the ranking.
	 *
	 * @param drInstance
	 *            {@link IDyadRankingInstance} for which the probability is
	 *            computed.
	 * @return Probablity of the top ranking for a given
	 *         {@link IDyadRankingInstance} given the Plackett Luce model
	 *         parametrized by the skill values predicted by the PLNet.
	 */
	public double getProbabilityOfTopRanking(final IDyadRankingInstance drInstance) {
		return this.getProbabilityOfTopKRanking(drInstance, drInstance.getNumAttributes());
	}

	/**
	 * Computes the predicted utility (latent skill) for each dyad of the given
	 * ranking, lazily initializing the network if necessary. Preserves the order
	 * of the dyads in the instance.
	 */
	private List<Pair<IDyad, Double>> getDyadUtilityPairsForInstance(final IDyadRankingInstance drInstance) {
		if (this.plNet == null) {
			int dyadSize = (drInstance.getLabel().get(0).getContext().length()) + (drInstance.getLabel().get(0).getAlternative().length());
			this.plNet = this.createNetwork(dyadSize);
			this.plNet.init();
		}
		List<Pair<IDyad, Double>> dyadUtilityPairs = new ArrayList<>(drInstance.getNumAttributes());
		for (IDyad dyad : drInstance) {
			INDArray plNetInput = this.dyadToVector(dyad);
			double plNetOutput = this.plNet.output(plNetInput).getDouble(0);
			dyadUtilityPairs.add(new Pair<>(dyad, plNetOutput));
		}
		return dyadUtilityPairs;
	}

	/**
	 * Like {@link #getDyadUtilityPairsForInstance(IDyadRankingInstance)}, but
	 * sorted in descending order of predicted utility.
	 */
	private List<Pair<IDyad, Double>> getSortedDyadUtilityPairsForInstance(final IDyadRankingInstance drInstance) {
		List<Pair<IDyad, Double>> dyadUtilityPairs = this.getDyadUtilityPairsForInstance(drInstance);
		Collections.sort(dyadUtilityPairs, Comparator.comparing(p -> -p.getRight()));
		return dyadUtilityPairs;
	}

	/**
	 * Returns the probability of the top k of the predicted ranking for the given
	 * {@link IDyadRankingInstance} under the Plackett-Luce model parametrized by
	 * the skill values predicted by the PLNet.
	 *
	 * @param drInstance ranking for which the probability is computed
	 * @param k number of top dyads to be considered
	 * @return the Plackett-Luce probability of the top k ranking, or NaN if the
	 *         skills degenerate to a zero normalizer
	 */
	public double getProbabilityOfTopKRanking(final IDyadRankingInstance drInstance, final int k) {
		List<Pair<IDyad, Double>> dyadUtilityPairs = this.getSortedDyadUtilityPairsForInstance(drInstance);
		// compute the probability of this ranking according to the Plackett-Luce model
		double currentProbability = 1;
		for (int i = 0; i < Integer.min(k, dyadUtilityPairs.size()); i++) {
			double sumOfRemainingSkills = 0;
			for (int j = i; j < Integer.min(k, dyadUtilityPairs.size()); j++) {
				sumOfRemainingSkills += Math.exp(dyadUtilityPairs.get(j).getRight());
			}
			if (sumOfRemainingSkills != 0) {
				currentProbability *= (Math.exp(dyadUtilityPairs.get(i).getRight()) / sumOfRemainingSkills);
			} else {
				currentProbability = Double.NaN;
			}
		}
		return currentProbability;
	}

	/**
	 * Returns the the log of the probablity of the top ranking for a given
	 * {@link IDyadRankingInstance} under the Plackett Luce model parametrized by
	 * the latent skill values predicted by the PLNet. This may be useful as the
	 * probability of a particular ranking diminishes drastically with increasing
	 * length of the ranking.
	 *
	 * @param drInstance
	 *            {@link IDyadRankingInstance} for which the probability is
	 *            computed.
	 * @return Log of the probablity of the top ranking for a given
	 *         {@link IDyadRankingInstance} given the Plackett Luce model
	 *         parametrized by the skill values predicted by the PLNet.
	 */
	public double getLogProbabilityOfTopRanking(final IDyadRankingInstance drInstance) {
		return this.getLogProbabilityOfTopKRanking(drInstance, Integer.MAX_VALUE);
	}

	/**
	 * Returns the log of the probablity of the top k of a given
	 * {@link IDyadRankingInstance} under the Plackett Luce model parametrized by
	 * the latent skill values predicted by the PLNet. This may be useful as the
	 * probability of a particular ranking diminishes drastically with increasing
	 * length of the ranking.
	 *
	 * @param drInstance
	 *            {@link IDyadRankingInstance} for which the probability is
	 *            computed.
	 * @param k
	 *            Number of top dyads to be considered.
	 * @return Log of the probablity of the top k of a the given
	 *         {@link IDyadRankingInstance} given the Plackett Luce model
	 *         parametrized by the skill values predicted by the PLNet.
	 */
	public double getLogProbabilityOfTopKRanking(final IDyadRankingInstance drInstance, final int k) {
		List<Pair<IDyad, Double>> dyadUtilityPairs = this.getSortedDyadUtilityPairsForInstance(drInstance);
		// compute the log probability of this ranking according to the Plackett-Luce model
		double currentProbability = 0;
		for (int i = 0; i < Integer.min(k, dyadUtilityPairs.size()); i++) {
			double sumOfRemainingSkills = 0;
			for (int j = i; j < Integer.min(k, dyadUtilityPairs.size()); j++) {
				sumOfRemainingSkills += Math.exp(dyadUtilityPairs.get(j).getRight());
			}
			currentProbability += (dyadUtilityPairs.get(i).getRight() - Math.log(sumOfRemainingSkills));
		}
		return currentProbability;
	}

	/**
	 * Returns the probablity of a given {@link IDyadRankingInstance} under the
	 * Plackett Luce model parametrized by the latent skill values predicted by the
	 * PLNet. In contrast to {@link #getProbabilityOfTopRanking(IDyadRankingInstance)},
	 * the dyads are evaluated in the order given by the instance, not in predicted
	 * utility order.
	 *
	 * @param drInstance
	 *            {@link IDyadRankingInstance} for which the probability is
	 *            computed.
	 * @return Probability of the given {@link IDyadRankingInstance} given the
	 *         Plackett Luce model parametrized by the skill values predicted by the
	 *         PLNet.
	 */
	public double getProbabilityRanking(final IDyadRankingInstance drInstance) {
		List<Pair<IDyad, Double>> dyadUtilityPairs = this.getDyadUtilityPairsForInstance(drInstance);
		// compute the probability of this ranking according to the Plackett-Luce model
		double currentProbability = 1;
		for (int i = 0; i < dyadUtilityPairs.size(); i++) {
			double sumOfRemainingSkills = 0;
			for (int j = i; j < dyadUtilityPairs.size(); j++) {
				sumOfRemainingSkills += Math.exp(dyadUtilityPairs.get(j).getRight());
			}
			if (sumOfRemainingSkills != 0) {
				currentProbability *= (Math.exp(dyadUtilityPairs.get(i).getRight()) / sumOfRemainingSkills);
			} else {
				currentProbability = Double.NaN;
			}
		}
		return currentProbability;
	}

	/**
	 * Computes the logarithmic probability for a particular ranking according to
	 * the log Placket-Luce model, i.e. the log of
	 * {@link #getProbabilityRanking(IDyadRankingInstance)}.
	 *
	 * @param drInstance ranking for which the log probability is computed
	 * @return Logarithmic probability of the given ranking.
	 */
	public double getLogProbabilityRanking(final IDyadRankingInstance drInstance) {
		List<Pair<IDyad, Double>> dyadUtilityPairs = this.getDyadUtilityPairsForInstance(drInstance);
		// log P = sum_i (v_i - log sum_{j >= i} exp(v_j)); the previous version
		// summed the raw skills instead of the log of the exponentiated sum, which
		// was inconsistent with getLogProbabilityOfTopKRanking and mathematically wrong
		double currentProbability = 0;
		for (int i = 0; i < dyadUtilityPairs.size(); i++) {
			double sumOfRemainingSkills = 0;
			for (int j = i; j < dyadUtilityPairs.size(); j++) {
				sumOfRemainingSkills += Math.exp(dyadUtilityPairs.get(j).getRight());
			}
			currentProbability += (dyadUtilityPairs.get(i).getRight() - Math.log(sumOfRemainingSkills));
		}
		return currentProbability;
	}

	/**
	 * Returns the latent skill value predicted by the PLNet for a given
	 * {@link Dyad}.
	 *
	 * @param dyad
	 *            {@link Dyad} for which the skill is to be predicted.
	 * @return Skill of the given {@link Dyad}, or NaN if the network has not been
	 *         initialized yet.
	 */
	public double getSkillForDyad(final IDyad dyad) {
		if (this.plNet == null) {
			return Double.NaN;
		}
		INDArray plNetInput = this.dyadToVector(dyad);
		return this.plNet.output(plNetInput).getDouble(0);
	}

	public void fit(final DyadRankingDataset dataset, final int maxEpochs, final double earlyStoppingTrainRatio) {
		this.fit(dataset.toND4j(), maxEpochs, earlyStoppingTrainRatio);
	}

	/**
	 * Trains the PLNet with early stopping.
	 *
	 * @param dataset rankings as {@link INDArray} matrices
	 * @param maxEpochs maximum number of epochs; 0 means no limit apart from early stopping
	 * @param earlyStoppingTrainRatio fraction of the data used for training; the rest is validation
	 */
	public void fit(final List<INDArray> dataset, final int maxEpochs, final double earlyStoppingTrainRatio) {
		List<INDArray> drTrain = dataset.subList(0, (int) (earlyStoppingTrainRatio * dataset.size()));
		List<INDArray> drTest = dataset.subList((int) (earlyStoppingTrainRatio * dataset.size()), dataset.size());
		if (this.plNet == null) {
			int dyadSize = dataset.get(0).columns();
			this.plNet = this.createNetwork(dyadSize);
			this.plNet.init();
		}
		double currentBestScore = Double.POSITIVE_INFINITY;
		MultiLayerNetwork currentBestModel = this.plNet;
		this.epoch = 0;
		this.iteration = 0;
		int patience = 0;
		int earlyStoppingCounter = 0;
		// non-positive patience disables early stopping; maxEpochs == 0 disables the epoch limit
		while ((patience < this.configuration.plNetEarlyStoppingPatience() || this.configuration.plNetEarlyStoppingPatience() <= 0) && (this.epoch < maxEpochs || maxEpochs == 0)) {
			// Iterate through training data
			this.tryUpdatingWithMinibatch(drTrain);
			log.debug("plNet params: {}", this.plNet.params());
			earlyStoppingCounter++;
			// Compute validation error
			if (earlyStoppingCounter == this.configuration.plNetEarlyStoppingInterval() && earlyStoppingTrainRatio < 1.0) {
				double avgScore = this.computeAvgError(drTest);
				if (avgScore < currentBestScore) {
					currentBestScore = avgScore;
					currentBestModel = this.plNet.clone();
					log.debug("current best score: {}", currentBestScore);
					patience = 0;
				} else {
					patience++;
				}
				earlyStoppingCounter = 0;
			}
			this.epoch++;
		}
		this.plNet = currentBestModel;
	}

	/**
	 * Trains the PLNet with the configured epoch limit and early-stopping split;
	 * optionally retrains on the full data for the number of epochs found best.
	 */
	public void fit(final List<INDArray> dataset) {
		this.fit(dataset, this.configuration.plNetMaxEpochs(), this.configuration.plNetEarlyStoppingTrainRatio());
		if (this.configuration.plNetEarlyStoppingRetrain()) {
			int maxEpochs = this.epoch;
			this.plNet = null;
			this.fit(dataset, maxEpochs, 1.0);
		}
	}

	@Override
	public void fit(final IDyadRankingDataset dTrain) throws TrainingException, InterruptedException {
		if (!(dTrain instanceof DyadRankingDataset)) {
			throw new TrainingException("Can only work with instances of type DyadRankingDataset.");
		}
		this.fit(((DyadRankingDataset) dTrain).toND4j());
	}

	/**
	 * Predicts a ranking by ordering the dyads of the given instance in descending
	 * order of their predicted utility.
	 */
	@Override
	public IRanking<IDyad> predict(final IDyadRankingInstance xTest) throws PredictionException, InterruptedException {
		// sorted in descending order of utility values; lazily initializes the network
		List<Pair<IDyad, Double>> dyadUtilityPairs = this.getSortedDyadUtilityPairsForInstance(xTest);
		List<IDyad> ranking = new ArrayList<>();
		dyadUtilityPairs.stream().map(Pair::getLeft).forEach(ranking::add);
		return new Ranking<>(ranking);
	}

	/**
	 * Returns the absolute difference of the predicted utilities of a pair of
	 * dyads, as a measure of the model's certainty about their order.
	 *
	 * @param queryInstance the pair of dyads to assess; must contain exactly 2 dyads
	 * @param sizeTwoRanking only used for capacity; not otherwise inspected
	 */
	public double getCertainty(final IDyadRankingInstance queryInstance, final IRanking<IDyad> sizeTwoRanking) {
		if (queryInstance.getNumAttributes() != 2) {
			throw new IllegalArgumentException("Can only provide certainty for pairs of dyads!");
		}
		List<Pair<IDyad, Double>> dyadUtilityPairs = new ArrayList<>(sizeTwoRanking.size());
		for (IDyad dyad : queryInstance) {
			INDArray plNetInput = this.dyadToVector(dyad);
			double plNetOutput = this.plNet.output(plNetInput).getDouble(0);
			dyadUtilityPairs.add(new Pair<>(dyad, plNetOutput));
		}
		return Math.abs(dyadUtilityPairs.get(0).getRight() - dyadUtilityPairs.get(1).getRight());
	}

	@Override
	public double getCertainty(final ILabeledInstance testInstance, final Object label) throws PredictionException, InterruptedException {
		throw new UnsupportedOperationException("Not yet implemented.");
	}

	@Override
	public IRankingPredictionBatch predict(final IDyadRankingInstance[] dTest) throws PredictionException, InterruptedException {
		List<IRanking<?>> rankings = new ArrayList<>();
		for (IDyadRankingInstance instance : dTest) {
			rankings.add(this.predict(instance));
		}
		return new RankingPredictionBatch(rankings);
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/algorithm/PLNetLoss.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.algorithm;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.indexing.NDArrayIndex;
import org.nd4j.linalg.ops.transforms.Transforms;
/**
* Implements the negative log likelihood (NLL) loss function for PL networks as described in [1].
 *
* [1]: Dirk Schäfer, Eyke Hüllermeier (2018). Dyad ranking using Plackett-Luce models based on joint feature representations
*
* @author michael
*
*/
public class PLNetLoss {

	// Utility class: static methods only, no instances.
	private PLNetLoss() {
		//Intentionally left blank
	}

	/**
	 * Computes the NLL for PL networks according to equation (27) in [1].
	 *
	 * @param plNetOutputs The outputs for M_n dyads generated by a PLNet's output layer in order of their ranking (from best to worst).
	 * @return The NLL loss for the given PLNet outputs.
	 */
	public static INDArray computeLoss(INDArray plNetOutputs) {
		if (!(plNetOutputs.isRowVector()) || plNetOutputs.size(1) < 2 ) {
			throw new IllegalArgumentException("Input has to be a row vector of 2 or more elements.");
		}
		long dyadRankingLength = plNetOutputs.size(1);
		double loss = 0;
		// Log-partition part of the Plackett-Luce NLL: for every rank position m
		// except the last, add log(sum_{l=m}^{M-1} exp(o_l)) over the remaining
		// (lower-ranked) outputs. NDArrayIndex.interval is half-open, so
		// interval(m, M) selects the outputs m..M-1.
		for (int m = 0; m <= dyadRankingLength - 2; m++) {
			INDArray innerSumSlice = plNetOutputs.get(NDArrayIndex.interval(m, dyadRankingLength));
			innerSumSlice = Transforms.exp(innerSumSlice);
			loss += Transforms.log(innerSumSlice.sum(1)).getDouble(0);
		}
		// Subtract the utilities of the first M-1 outputs; the last output's term
		// cancels in the Plackett-Luce likelihood and is therefore omitted.
		loss -= plNetOutputs.get(NDArrayIndex.interval(0, dyadRankingLength - 1)).sum(1).getDouble(0);
		return Nd4j.create(new double[]{loss});
	}

	/**
	 * Computes the gradient of the NLL for PL networks w.r.t. the k-th dyad according to equation (28) in [1].
	 * @param plNetOutputs The outputs for M_n dyads generated by a PLNet's output layer in order of their ranking (from best to worst).
	 * @param k The ranking position with respect to which the gradient should be computed. Assumes zero-based indices, unlike the paper.
	 * @return The gradient of the NLL loss w.r.t. the k-th dyad in the ranking.
	 */
	public static INDArray computeLossGradient(INDArray plNetOutputs, int k) {
		if (!(plNetOutputs.isRowVector()) || plNetOutputs.size(1) < 2 || k < 0 || k >= plNetOutputs.size(1)) {
			throw new IllegalArgumentException("Input has to be a row vector of 2 or more elements. And k has to be a valid index of plNetOutputs.");
		}
		long dyadRankingLength = plNetOutputs.size(1);
		double errorGradient = 0;
		// d/do_k of the log-partition terms: o_k appears in the partition sums of
		// all rank positions m <= k, each contributing
		// exp(o_k) / sum_{l=m}^{M-1} exp(o_l).
		for (int m = 0; m <= k; m++) {
			INDArray innerSumSlice = plNetOutputs.get(NDArrayIndex.interval(m, dyadRankingLength));
			innerSumSlice = Transforms.exp(innerSumSlice);
			double innerSum = innerSumSlice.sum(1).getDouble(0);
			errorGradient += Math.exp(plNetOutputs.getDouble(k)) / innerSum;
		}
		// d/do_k of the negative utility sum. For k = M-1 the utility sum of
		// computeLoss does not contain o_k, but then the loop's m = M-1 term above
		// is exactly 1, so subtracting the constant 1 cancels it and the result is
		// still correct for every k.
		errorGradient -= 1;
		return Nd4j.create(new double[] {errorGradient});
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/algorithm
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/algorithm/featuretransform/BiliniearFeatureTransform.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.algorithm.featuretransform;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyad;
import org.api4.java.common.math.IVector;
/**
 * Implementation of the feature transformation method using the Kronecker
 * product.
*
* @author Helena Graf, Mirko Jürgens
*
*/
public class BiliniearFeatureTransform implements IDyadFeatureTransform {

	/**
	 * Transforms the dyad into a joint feature representation by taking the
	 * Kronecker product of its context vector and its alternative vector.
	 *
	 * @param dyad the dyad to transform
	 * @return the Kronecker product of the dyad's context and alternative
	 */
	@Override
	public IVector transform(final IDyad dyad) {
		return dyad.getContext().kroneckerProduct(dyad.getAlternative().asArray());
	}

	/**
	 * The Kronecker product of two vectors has as many entries as the product of
	 * their lengths.
	 *
	 * @param alternativeLength the length of the alternative vector
	 * @param instanceLength the length of the context (instance) vector
	 * @return the length of the transformed feature vector
	 */
	@Override
	public int getTransformedVectorLength(final int alternativeLength, final int instanceLength) {
		return instanceLength * alternativeLength;
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/algorithm
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/algorithm/featuretransform/FeatureTransformPLDyadRanker.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.algorithm.featuretransform;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.api4.java.ai.ml.core.exception.PredictionException;
import org.api4.java.ai.ml.core.exception.TrainingException;
import org.api4.java.ai.ml.ranking.IRanking;
import org.api4.java.ai.ml.ranking.IRankingPredictionBatch;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyad;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingDataset;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingInstance;
import org.api4.java.common.math.IVector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ai.libs.jaicore.basic.sets.Pair;
import ai.libs.jaicore.math.linearalgebra.DenseDoubleVector;
import ai.libs.jaicore.ml.core.learner.ASupervisedLearner;
import ai.libs.jaicore.ml.ranking.RankingPredictionBatch;
import ai.libs.jaicore.ml.ranking.dyad.learner.algorithm.IPLDyadRanker;
import ai.libs.jaicore.ml.ranking.dyad.learner.optimizing.BilinFunction;
import ai.libs.jaicore.ml.ranking.dyad.learner.optimizing.DyadRankingFeatureTransformNegativeLogLikelihood;
import ai.libs.jaicore.ml.ranking.dyad.learner.optimizing.DyadRankingFeatureTransformNegativeLogLikelihoodDerivative;
import ai.libs.jaicore.ml.ranking.dyad.learner.optimizing.IDyadRankingFeatureTransformPLGradientDescendableFunction;
import ai.libs.jaicore.ml.ranking.dyad.learner.optimizing.IDyadRankingFeatureTransformPLGradientFunction;
import ai.libs.jaicore.ml.ranking.label.learner.clusterbased.customdatatypes.Ranking;
import edu.stanford.nlp.optimization.QNMinimizer;
/**
* A feature transformation Plackett-Luce dyad ranker. By default uses bilinear
* feature transformation.
*
* All the provided algorithms are implementations of the PLModel introduced in
* [1].
*
*
* [1] Schäfer, D. & Hüllermeier, Dyad ranking using Plackett-Luce models based
* on joint feature representations,
* https://link.springer.com/article/10.1007%2Fs10994-017-5694-9
*
* @author Helena Graf, Mirko Jürgens
*
*/
public class FeatureTransformPLDyadRanker extends ASupervisedLearner<IDyadRankingInstance, IDyadRankingDataset, IRanking<IDyad>, IRankingPredictionBatch> implements IPLDyadRanker {

	private static final Logger log = LoggerFactory.getLogger(FeatureTransformPLDyadRanker.class);

	/* Phi in the paper: maps a dyad to its joint feature representation. */
	private IDyadFeatureTransform featureTransform;

	/*
	 * The label-specific weight vector that determines the linear function used to
	 * calculate the skill parameters.
	 */
	private IVector w;

	/* The differentiable function optimized by the optimizer to find vector w. */
	private IDyadRankingFeatureTransformPLGradientDescendableFunction negativeLogLikelihood = new DyadRankingFeatureTransformNegativeLogLikelihood();

	/* The derivative of the above function. */
	private IDyadRankingFeatureTransformPLGradientFunction negativeLogLikelihoodDerivative = new DyadRankingFeatureTransformNegativeLogLikelihoodDerivative();

	/**
	 * Constructs a new feature transform Plackett-Luce dyad ranker with bilinear
	 * feature transformation.
	 */
	public FeatureTransformPLDyadRanker() {
		this(new BiliniearFeatureTransform());
	}

	/**
	 * Constructs a new feature transform Plackett-Luce dyad ranker with the given
	 * feature transformation method.
	 *
	 * @param featureTransform
	 *            the feature transformation method to use
	 */
	public FeatureTransformPLDyadRanker(final IDyadFeatureTransform featureTransform) {
		this.featureTransform = featureTransform;
	}

	/**
	 * Computes the Plackett-Luce skill parameter of a dyad as exp(w . phi(dyad)).
	 *
	 * @param dyad the dyad whose skill is computed
	 * @return the (strictly positive) skill value
	 */
	private double computeSkillForDyad(final IDyad dyad) {
		IVector featureTransformVector = this.featureTransform.transform(dyad);
		double dot = this.w.dotProduct(featureTransformVector);
		double val = Math.exp(dot);
		log.debug("Feature transform for dyad {} is {}. \n Dot-Product is {} and skill is {}", dyad, featureTransformVector, dot, val);
		return val;
	}

	/**
	 * Computes the likelihood of the parameter vector w. Algorithm (16) of [1].
	 *
	 * @param w
	 *            the parameter vector whose likelihood is to be computed
	 * @param dataset
	 *            the dataset on which the likelihood should be evaluated
	 * @return the likelihood, measured as a probability
	 */
	private double likelihoodOfParameter(final IVector w, final IDyadRankingDataset dataset) {
		int largeN = dataset.size();
		double outerProduct = 1.0;
		for (int smallN = 0; smallN < largeN; smallN++) {
			IDyadRankingInstance dyadRankingInstance = dataset.get(smallN);
			int mN = dyadRankingInstance.getNumberOfRankedElements();
			double innerProduct = 1.0;
			for (int m = 0; m < mN; m++) {
				IDyad dyad = dyadRankingInstance.getLabel().get(m);
				IVector zNM = this.featureTransform.transform(dyad);
				double en = Math.exp(w.dotProduct(zNM));
				// Denominator: skills of the dyad itself and of all dyads ranked below it.
				double denumSum = 0;
				for (int l = m; l < mN; l++) {
					IDyad dyadL = dyadRankingInstance.getLabel().get(l);
					IVector zNL = this.featureTransform.transform(dyadL);
					denumSum += Math.exp(w.dotProduct(zNL));
				}
				innerProduct = innerProduct * (en / denumSum);
			}
			outerProduct = outerProduct * innerProduct;
		}
		return outerProduct;
	}

	/**
	 * Fits the weight vector w by minimizing the negative log-likelihood of the
	 * dataset with a quasi-Newton optimizer.
	 *
	 * @param dataset the training dataset; must not be empty
	 * @throws TrainingException if the dataset is empty
	 * @throws InterruptedException if training is interrupted
	 */
	@Override
	public void fit(final IDyadRankingDataset dataset) throws TrainingException, InterruptedException {
		// Guard against an empty dataset, which would otherwise fail below with an
		// uninformative IndexOutOfBoundsException on dataset.get(0).
		if (dataset.size() == 0) {
			throw new TrainingException("Cannot fit the dyad ranker on an empty dataset.");
		}
		// Pre-compute all feature transforms once; the NLL and its derivative are
		// evaluated many times during optimization.
		Map<IDyadRankingInstance, Map<IDyad, IVector>> featureTransforms = this.featureTransform.getPreComputedFeatureTransforms(dataset);
		this.negativeLogLikelihood.initialize(dataset, featureTransforms);
		this.negativeLogLikelihoodDerivative.initialize(dataset, featureTransforms);
		int alternativeLength = dataset.get(0).getLabel().get(0).getAlternative().length();
		int instanceLength = dataset.get(0).getLabel().get(0).getContext().length();
		// Start from a constant vector; the NLL is convex, so the starting point
		// only influences the number of optimization steps.
		this.w = new DenseDoubleVector(this.featureTransform.getTransformedVectorLength(alternativeLength, instanceLength), 0.3);
		log.debug("Likelihood of the randomly filled w is {}", this.likelihoodOfParameter(this.w, dataset));
		BilinFunction fun = new BilinFunction(featureTransforms, dataset, this.featureTransform.getTransformedVectorLength(alternativeLength, instanceLength));
		QNMinimizer minimizer = new QNMinimizer();
		this.w = new DenseDoubleVector(minimizer.minimize(fun, 0.01, this.w.asArray()));
		log.debug("Finished optimizing, the final w is {}", this.w);
	}

	/**
	 * Predicts a ranking for the dyads of the given query instance based on their
	 * skill values under the trained weight vector w.
	 *
	 * @param instance the query instance whose dyads should be ranked
	 * @return the predicted ranking
	 * @throws PredictionException if the ranker has not been trained yet
	 * @throws InterruptedException if prediction is interrupted
	 */
	@Override
	public IRanking<IDyad> predict(final IDyadRankingInstance instance) throws PredictionException, InterruptedException {
		if (this.w == null) {
			throw new PredictionException("The Ranker has not been trained yet.");
		}
		// BUGFIX: this log line previously claimed to be "Training" during predict.
		log.debug("Predicting ranking for instance {}", instance);
		List<Pair<Double, IDyad>> skillForDyads = new ArrayList<>();
		for (IDyad d : instance) {
			double skill = this.computeSkillForDyad(d);
			skillForDyads.add(new Pair<>(skill, d));
		}
		// NOTE(review): dyads are returned in ascending order of skill here, whereas
		// PLNetDyadRanker returns descending utility (best first) -- confirm which
		// ordering convention IRanking expects.
		return new Ranking<>(skillForDyads.stream().sorted((p1, p2) -> Double.compare(p1.getX(), p2.getX())).map(Pair<Double, IDyad>::getY).collect(Collectors.toList()));
	}

	/**
	 * Predicts a ranking for every query instance of the given batch.
	 *
	 * @param dTest the query instances to rank
	 * @return a batch with one ranking per query instance, in input order
	 * @throws PredictionException if any single prediction fails
	 * @throws InterruptedException if prediction is interrupted
	 */
	@Override
	public IRankingPredictionBatch predict(final IDyadRankingInstance[] dTest) throws PredictionException, InterruptedException {
		List<IRanking<?>> rankings = new ArrayList<>();
		for (IDyadRankingInstance instance : dTest) {
			rankings.add(this.predict(instance));
		}
		return new RankingPredictionBatch(rankings);
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/algorithm
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/algorithm/featuretransform/IDyadFeatureTransform.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.algorithm.featuretransform;
import java.util.HashMap;
import java.util.Map;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyad;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingDataset;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingInstance;
import org.api4.java.common.math.IVector;
/**
* Feature transformation interface for the
* {@link FeatureTransformPLDyadRanker}.
*
* @author Helena Graf, Mirko Jürgens
*
*/
public interface IDyadFeatureTransform {

	/**
	 * Transform the instance of the given dyad (models the skill).
	 *
	 * @param dyad
	 *            the dyad to transform
	 * @return the transformed instance values for the dyad
	 */
	public IVector transform(IDyad dyad);

	/**
	 * Get the length of the vector returned by the transform method.
	 *
	 * @param alternativeLength
	 *            the length of the alternative vector of the transformed dyad
	 * @param instanceLength
	 *            the length of the instance vector of the transformed dyad
	 * @return the length of the transformed feature vector
	 */
	public int getTransformedVectorLength(int alternativeLength, int instanceLength);

	/**
	 * Pre-computes the feature transform of every dyad in the dataset. This can
	 * speed up the runtime, as each later lookup becomes O(1) at a one-time cost
	 * of O(n).
	 *
	 * @param dataset the dataset whose dyads should be transformed
	 * @return for each ranking instance, a map from its dyads to their transforms
	 */
	default Map<IDyadRankingInstance, Map<IDyad, IVector>> getPreComputedFeatureTransforms(final IDyadRankingDataset dataset) {
		Map<IDyadRankingInstance, Map<IDyad, IVector>> transformsPerInstance = new HashMap<>();
		for (IDyadRankingInstance rankingInstance : dataset) {
			Map<IDyad, IVector> transformsPerDyad = new HashMap<>();
			int numRankedDyads = rankingInstance.getLabel().size();
			for (int j = 0; j < numRankedDyads; j++) {
				IDyad dyad = rankingInstance.getLabel().get(j);
				transformsPerDyad.put(dyad, this.transform(dyad));
			}
			transformsPerInstance.put(rankingInstance, transformsPerDyad);
		}
		return transformsPerInstance;
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/optimizing/BilinFunction.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.optimizing;
import java.util.Map;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyad;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingDataset;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingInstance;
import org.api4.java.common.math.IVector;
import ai.libs.jaicore.math.linearalgebra.DenseDoubleVector;
import ai.libs.jaicore.ml.ranking.dyad.learner.algorithm.featuretransform.BiliniearFeatureTransform;
import edu.stanford.nlp.optimization.DiffFunction;
import edu.stanford.nlp.optimization.QNMinimizer;
/**
* Wraps the NLL optimizing problem into the {@link QNMinimizer} optimizer.
*
* @author mirkoj
*
*/
public class BilinFunction implements DiffFunction {

	/* The negative log-likelihood to be minimized. */
	private final DyadRankingFeatureTransformNegativeLogLikelihood nllFunction;

	/* The gradient of the negative log-likelihood. */
	private final DyadRankingFeatureTransformNegativeLogLikelihoodDerivative nllGradient;

	/* The dimension of the vector that is optimized. */
	private final int dimension;

	/**
	 * Creates a NLL optimizing problem for the kronecker product as the bilinear feature transform.
	 *
	 * @param featureTransforms the pre-computed feature transforms of all dyads
	 * @param drDataset the dataset to optimize on
	 * @param dimension the dimension of the optimized vector
	 */
	public BilinFunction(final Map<IDyadRankingInstance, Map<IDyad, IVector>> featureTransforms, final IDyadRankingDataset drDataset, final int dimension) {
		this.nllFunction = new DyadRankingFeatureTransformNegativeLogLikelihood();
		this.nllFunction.initialize(drDataset, featureTransforms);
		this.nllGradient = new DyadRankingFeatureTransformNegativeLogLikelihoodDerivative();
		this.nllGradient.initialize(drDataset, featureTransforms);
		this.dimension = dimension;
	}

	/** Evaluates the NLL at the given point. */
	@Override
	public double valueAt(final double[] x) {
		return this.nllFunction.apply(new DenseDoubleVector(x));
	}

	/** @return the dimension of the optimization domain */
	@Override
	public int domainDimension() {
		return this.dimension;
	}

	/** Evaluates the gradient of the NLL at the given point. */
	@Override
	public double[] derivativeAt(final double[] x) {
		return this.nllGradient.apply(new DenseDoubleVector(x)).asArray();
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/optimizing/DyadRankingFeatureTransformNegativeLogLikelihood.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.optimizing;
import java.util.Map;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyad;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingDataset;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingInstance;
import org.api4.java.algorithm.IOptimizationAlgorithm;
import org.api4.java.common.math.IVector;
/**
* Implements the negative log-likelihood function for the feature
* transformation Placket-Luce dyad ranker.
*
 * In particular, this implementation is the NLL of [1] (we adhere to their notation
* here). This NLL is a convex function, which we can optimize using an
* {@link IOptimizationAlgorithm}, together with the
* {@link DyadRankingFeatureTransformNegativeLogLikelihoodDerivative}.
*
*
* [1] Schäfer, D. & Hüllermeier, Dyad ranking using Plackett–Luce models based
* on joint feature representations,
* https://link.springer.com/article/10.1007%2Fs10994-017-5694-9
*
* @author Helena Graf, Mirko Jürgens
*
*/
public class DyadRankingFeatureTransformNegativeLogLikelihood implements IDyadRankingFeatureTransformPLGradientDescendableFunction {

	/* the dataset used by this function */
	private IDyadRankingDataset dataset;

	/* pre-computed feature transforms of all dyads, grouped by ranking instance */
	private Map<IDyadRankingInstance, Map<IDyad, IVector>> featureTransforms;

	@Override
	public void initialize(final IDyadRankingDataset dataset, final Map<IDyadRankingInstance, Map<IDyad, IVector>> featureTransforms) {
		this.dataset = dataset;
		this.featureTransforms = featureTransforms;
	}

	/**
	 * Algorithm (18) of [1]. We adhere to their notation, but unify the sums:
	 *
	 * NLL(w) = - sum_n sum_m w.z_{n,m} + sum_n sum_m log( sum_{l=m}^{M_n-1} exp(w.z_{n,l}) )
	 *
	 * @param w the current weight vector
	 * @return the negative log-likelihood of w on the initialized dataset
	 */
	@Override
	public double apply(final IVector w) {
		double firstSum = 0d;
		double secondSum = 0d;
		int largeN = this.dataset.size();
		for (int smallN = 0; smallN < largeN; smallN++) {
			IDyadRankingInstance instance = this.dataset.get(smallN);
			int mN = instance.getNumberOfRankedElements();
			for (int m = 0; m < mN; m++) {
				IDyad dyad = instance.getLabel().get(m);
				firstSum = firstSum + w.dotProduct(this.featureTransforms.get(instance).get(dyad));
				double innerSum = 0d;
				// BUGFIX: the inner sum runs over the dyad itself and all dyads ranked
				// below it, i.e. l = m .. mN-1. The previous bound (l < mN - 1)
				// excluded the last-ranked dyad from every denominator and produced
				// log(0) = -Infinity for m = mN - 1; it was also inconsistent with
				// DyadRankingFeatureTransformNegativeLogLikelihoodDerivative and with
				// FeatureTransformPLDyadRanker.likelihoodOfParameter.
				for (int l = m; l < mN; l++) {
					IDyad innerDyad = instance.getLabel().get(l);
					innerSum = innerSum + Math.exp(w.dotProduct(this.featureTransforms.get(instance).get(innerDyad)));
				}
				secondSum = secondSum + Math.log(innerSum);
			}
		}
		return -firstSum + secondSum;
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/optimizing/DyadRankingFeatureTransformNegativeLogLikelihoodDerivative.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.optimizing;
import java.util.Map;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyad;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingDataset;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingInstance;
import org.api4.java.common.math.IVector;
import ai.libs.jaicore.math.linearalgebra.DenseDoubleVector;
/**
* Represents the derivate of the negative log likelihood function in the
* context of feature transformation Placket-Luce dyad ranking [1].
*
* This implementation can be used for the partial derivatives of the linear
* vector <code>w</code> w.r.t. the negative log-likelihood that should be
* minimized.
*
* [1] Schäfer, D. & Hüllermeier, Dyad ranking using Plackett–Luce models based
* on joint feature representations,
* https://link.springer.com/article/10.1007%2Fs10994-017-5694-9
*
* @author Helena Graf, Mirko Jürgens
*
*/
public class DyadRankingFeatureTransformNegativeLogLikelihoodDerivative implements IDyadRankingFeatureTransformPLGradientFunction {

	/* the dataset used by this function */
	private IDyadRankingDataset dataset;

	/* pre-computed feature transforms of all dyads, grouped by ranking instance */
	private Map<IDyadRankingInstance, Map<IDyad, IVector>> featureTransforms;

	@Override
	public void initialize(final IDyadRankingDataset dataset, final Map<IDyadRankingInstance, Map<IDyad, IVector>> featureTransforms) {
		this.dataset = dataset;
		this.featureTransforms = featureTransforms;
	}

	/**
	 * Computes the full gradient of the NLL by evaluating the partial derivative
	 * for every component of the weight vector.
	 *
	 * @param vector the current w vector
	 * @return the gradient of the NLL at w
	 */
	@Override
	public IVector apply(final IVector vector) {
		IVector result = new DenseDoubleVector(vector.length());
		for (int i = 0; i < vector.length(); i++) {
			result.setValue(i, this.computeDerivativeForIndex(i, vector));
		}
		return result;
	}

	/**
	 * Computes the partial derivatives of every single w_i. Algorithm (19) of [1].
	 *
	 * @param i
	 *            the index of the partial derivative.
	 * @param vector
	 *            the w vector
	 * @return the partial derivative w_i
	 */
	private double computeDerivativeForIndex(final int i, final IVector vector) {
		double secondSum = 0d;
		int largeN = this.dataset.size();
		double firstSum = 0d;
		for (int smallN = 0; smallN < largeN; smallN++) {
			IDyadRankingInstance instance = this.dataset.get(smallN);
			int mN = instance.getNumberOfRankedElements();
			for (int m = 0; m < mN - 1; m++) {
				double innerDenumerator = 0d;
				double innerNumerator = 0d;
				IDyad dyad = instance.getLabel().get(m);
				firstSum = firstSum + this.featureTransforms.get(instance).get(dyad).getValue(i);
				for (int l = m; l < mN; l++) {
					// BUGFIX: look up the feature transform by the l-th *dyad* of the
					// ranking, consistent with the lookup for dyad m above. The previous
					// code used instance.getAttributeValue(l) as the map key, which is
					// not a dyad and therefore never matches an entry of the
					// feature-transform map (NPE when dereferencing zNL).
					IDyad innerDyad = instance.getLabel().get(l);
					IVector zNL = this.featureTransforms.get(instance).get(innerDyad);
					double dotProd = Math.exp(vector.dotProduct(zNL));
					innerNumerator = innerNumerator + zNL.getValue(i) * dotProd;
					innerDenumerator = innerDenumerator + dotProd;
				}
				if (innerDenumerator != 0) {
					secondSum = secondSum + innerNumerator / innerDenumerator;
				}
			}
		}
		return -firstSum + secondSum;
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/optimizing/IDyadRankingFeatureTransformPLGradientDescendableFunction.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.optimizing;
import java.util.Map;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyad;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingDataset;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingInstance;
import org.api4.java.common.math.IVector;
import ai.libs.jaicore.math.gradientdescent.IGradientDescendableFunction;
/**
* An interface for a differentiable function in the context of feature
* transformation Placket-Luce dyad ranking.
*
* @author Helena Graf
*
*/
public interface IDyadRankingFeatureTransformPLGradientDescendableFunction extends IGradientDescendableFunction {

	/**
	 * Initializes the function with the given dataset and the pre-computed
	 * feature transforms of its dyads.
	 *
	 * @param dataset
	 *            the dataset to use
	 * @param featureTransform
	 *            the pre-computed feature transform of each dyad, grouped by ranking instance
	 */
	void initialize(IDyadRankingDataset dataset, Map<IDyadRankingInstance, Map<IDyad, IVector>> featureTransform);
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/optimizing/IDyadRankingFeatureTransformPLGradientFunction.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.optimizing;
import java.util.Map;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyad;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingDataset;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingInstance;
import org.api4.java.common.math.IVector;
import ai.libs.jaicore.math.gradientdescent.IGradientFunction;
/**
* Represents a differentiable function in the context of dyad ranking based on
* feature transformation Placket-Luce models.
*
* @author Helena Graf
*
*/
public interface IDyadRankingFeatureTransformPLGradientFunction extends IGradientFunction {

	/**
	 * Initializes the function with the given dataset and the pre-computed
	 * feature transforms of its dyads.
	 *
	 * @param dataset
	 *            the dataset to use
	 * @param featureTransforms
	 *            the pre-computed feature transform of each dyad, grouped by ranking instance
	 */
	void initialize(IDyadRankingDataset dataset, Map<IDyadRankingInstance, Map<IDyad, IVector>> featureTransforms);
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/search/ADyadRankedNodeQueue.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.search;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Queue;
import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.IEvaluatedPath;
import org.api4.java.ai.ml.core.evaluation.IPrediction;
import org.api4.java.ai.ml.core.exception.PredictionException;
import org.api4.java.ai.ml.ranking.IRanking;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyad;
import org.api4.java.common.math.IVector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import ai.libs.jaicore.ml.ranking.dyad.dataset.DenseDyadRankingInstance;
import ai.libs.jaicore.ml.ranking.dyad.dataset.DyadRankingDataset;
import ai.libs.jaicore.ml.ranking.dyad.learner.Dyad;
import ai.libs.jaicore.ml.ranking.dyad.learner.algorithm.IDyadRanker;
import ai.libs.jaicore.ml.ranking.dyad.learner.util.AbstractDyadScaler;
/**
* A queue whose elements are nodes, sorted by a dyad ranker.
*
* @author Helena Graf
*
* @param <N>
* First node parameter
* @param <V>
* Second node parameter
*/
public abstract class ADyadRankedNodeQueue<N, V extends Comparable<V>> implements Queue<IEvaluatedPath<N, ?, V>> {
private Logger logger = LoggerFactory.getLogger(this.getClass());
/** the dyad ranker used to rank the nodes */
private IDyadRanker dyadRanker;
/** for scaling the dyads */
protected AbstractDyadScaler scaler;
private boolean useScaler = false;
/** the actual queue of nodes */
private List<IEvaluatedPath<N, ?, V>> queue = new ArrayList<>();
/**
* characterizations of the nodes (scaled) (not ordered the same way as the
* nodes!)
*/
private List<IVector> nodeCharacterizations = new ArrayList<>();
/** unscaled (original) characterization of the nodes */
private IVector originalContextCharacterization;
/** characterization of the context the nodes are ranked in */
private IVector contextCharacterization;
private List<IDyad> queryDyads = new ArrayList<>();
/** connects nodes to their respective characterizations */
private BiMap<IEvaluatedPath<N, ?, V>, IVector> nodesAndCharacterizationsMap = HashBiMap.create();
/**
* Constructs a new DyadRankedNodeQueue that ranks the nodes in the queue
* according to the given context characterization.
*
* @param contextCharacterization
* the characterization of the context the nodes are ranked in
*/
public ADyadRankedNodeQueue(final IVector contextCharacterization) {
this.contextCharacterization = contextCharacterization.addConstantToCopy(0);
this.originalContextCharacterization = contextCharacterization;
this.logger.trace("Construct ADyadNodeQueue with contexcharacterization {}", contextCharacterization);
}
/**
* Constructs a new DyadRankedNodeQueue that ranks the nodes in the queue
* according to the given context characterization and given dyad ranker. Given
* dyad ranker must be pre-trained.
*
* @param contextCharacterization
* the characterization of the context the nodes are ranked in
* @param dyadRanker
* the dyad ranker to be used for the ranking of the nodes
*/
public ADyadRankedNodeQueue(final IVector contextCharacterization, final IDyadRanker dyadRanker, final AbstractDyadScaler scaler) {
this(contextCharacterization);
this.dyadRanker = dyadRanker;
this.scaler = scaler;
if (scaler != null) {
this.useScaler = true;
// transform dataset
this.transformContextCharacterization();
}
}
/**
* Provide a characterization of the given node to be used by the dyad ranker.
*
* @param node
* the node to be characterized
* @return the characterization of the node
*/
protected abstract IVector characterize(IEvaluatedPath<N, ?, V> node);
@Override
public int size() {
return this.queue.size();
}
@Override
public boolean isEmpty() {
return this.queue.isEmpty();
}
@Override
public boolean contains(final Object o) {
return this.queue.contains(o);
}
@Override
public Iterator<IEvaluatedPath<N, ?, V>> iterator() {
return this.queue.iterator();
}
@Override
public Object[] toArray() {
return this.queue.toArray();
}
@Override
public <T> T[] toArray(final T[] a) {
return this.queue.toArray(a);
}
@Override
public boolean remove(final Object o) {
if (o instanceof IEvaluatedPath<?, ?, ?>) {
int index = -1;
for (int i = 0; i < this.queue.size(); i++) {
if (this.queue.get(i).equals(o)) {
index = i;
}
}
if (index != -1) {
this.removeNodeAtPosition(index);
return true;
}
return false;
}
return false;
}
@Override
public boolean containsAll(final Collection<?> c) {
return this.queue.containsAll(c);
}
@Override
public boolean addAll(final Collection<? extends IEvaluatedPath<N, ?, V>> c) {
this.logger.trace("Add {} nodes", c.size());
boolean changed = false;
for (IEvaluatedPath<N, ?, V> elem : c) {
if (this.add(elem)) {
changed = true;
}
}
return changed;
}
@Override
public boolean removeAll(final Collection<?> c) {
boolean changed = false;
for (Object o : c) {
if (this.remove(o)) {
changed = true;
}
}
return changed;
}
@Override
public boolean retainAll(final Collection<?> c) {
throw new UnsupportedOperationException();
}
@Override
public void clear() {
this.queue.clear();
this.nodesAndCharacterizationsMap.clear();
this.nodeCharacterizations.clear();
}
@SuppressWarnings("unchecked")
@Override
public boolean add(final IEvaluatedPath<N, ?, V> e) {
if (this.queue.contains(e)) {
return true;
} else if (e != null) {
try {
this.logger.debug("Add node to OPEN.");
// characterize new node
IVector characterization = this.characterize(e);
this.nodeCharacterizations.add(characterization);
Dyad newDyad = new Dyad(this.contextCharacterization, characterization);
this.queryDyads.add(newDyad);
if (this.useScaler) {
// scale node
DyadRankingDataset dataset = new DyadRankingDataset();
dataset.add(new DenseDyadRankingInstance(Arrays.asList(newDyad)));
this.scaler.transformAlternatives(dataset);
}
this.replaceNaNByZeroes(characterization);
// add new pairing of node and characterization
this.nodesAndCharacterizationsMap.put(e, characterization);
// predict new ranking and reorder queue accordingly
IPrediction prediction = this.dyadRanker.predict(new DenseDyadRankingInstance(this.queryDyads));
this.queue.clear();
for (int i = 0; i < ((IRanking<Dyad>) prediction.getPrediction()).size(); i++) {
IEvaluatedPath<N, ?, V> toAdd = this.nodesAndCharacterizationsMap.inverse().get(((IRanking<Dyad>) prediction.getPrediction()).get(i).getAlternative());
if (toAdd != null) {
this.queue.add(toAdd);
} else {
this.logger.warn("Got a node in a prediction that doesnt exist");
}
}
return true;
} catch (PredictionException e1) {
this.logger.warn("Failed to characterize: {}", e1.getLocalizedMessage());
// remove unneeded characterization (ranking has failed)
this.nodeCharacterizations.remove(this.nodeCharacterizations.size() - 1);
return false;
} catch (InterruptedException e1) {
Thread.currentThread().interrupt();
return false;
}
} else {
return false;
}
}
private void replaceNaNByZeroes(final IVector vector) {
for (int i = 0; i < vector.length(); i++) {
if (Double.isNaN(vector.getValue(i))) {
vector.setValue(i, 0);
}
}
}
@Override
public boolean offer(final IEvaluatedPath<N, ?, V> e) {
return this.add(e);
}
@Override
public IEvaluatedPath<N, ?, V> remove() {
return this.removeNodeAtPosition(0);
}
public IEvaluatedPath<N, ?, V> removeNodeAtPosition(final int i) {
IEvaluatedPath<N, ?, V> removedNode = this.queue.remove(i);
this.logger.trace("Retrieve node from OPEN. Index: {}", i);
this.nodeCharacterizations.remove(this.nodesAndCharacterizationsMap.get(removedNode));
IVector removedAlternative = this.nodesAndCharacterizationsMap.remove(removedNode);
int index = -1;
for (int j = 0; j < this.queryDyads.size(); j++) {
if (this.queryDyads.get(j).getAlternative().equals(removedAlternative)) {
index = j;
break;
}
}
if (index >= -1) {
this.queryDyads.remove(index);
}
return removedNode;
}
/**
 * Removes and returns the head of the queue, or returns null when the queue
 * is empty (per the {@code Queue#poll} contract).
 */
@Override
public IEvaluatedPath<N, ?, V> poll() {
	return this.queue.isEmpty() ? null : this.remove();
}
/**
 * Returns (without removing) the head of the queue.
 *
 * @return the best-ranked node
 * @throws java.util.NoSuchElementException if the queue is empty
 */
@Override
public IEvaluatedPath<N, ?, V> element() {
	// Queue#element must throw NoSuchElementException on an empty queue;
	// queue.get(0) alone would throw IndexOutOfBoundsException instead
	if (this.queue.isEmpty()) {
		throw new java.util.NoSuchElementException("OPEN list is empty");
	}
	return this.queue.get(0);
}
/**
 * Returns (without removing) the head of the queue, or null when the queue
 * is empty.
 */
@Override
public IEvaluatedPath<N, ?, V> peek() {
	if (this.queue.isEmpty()) {
		return null;
	}
	this.logger.trace("Peek from OPEN.");
	return this.element();
}
/**
 * Get the dyad ranker used to rank the nodes in this queue.
 *
 * @return the dyad ranker
 */
public IDyadRanker getDyadRanker() {
	return this.dyadRanker;
}
/**
 * Set which dyad ranker shall be used to rank the nodes. It is not trained in
 * this class, so it must be pre-trained before setting it as a dyad ranker for
 * this queue!
 *
 * @param dyadRanker
 *            the dyad ranker
 */
public void setDyadRanker(final IDyadRanker dyadRanker) {
	final Class<?> oldRankerClass = this.dyadRanker.getClass();
	final Class<?> newRankerClass = dyadRanker.getClass();
	this.logger.trace("Update dyad ranker. Was {} now is {}", oldRankerClass, newRankerClass);
	this.dyadRanker = dyadRanker;
}
/**
 * Get the scaler used to scale the dyads before ranking.
 *
 * @return the scaler, or whatever default was configured if none was set
 */
public AbstractDyadScaler getScaler() {
	return this.scaler;
}
/**
 * Set the scaler used to scale the dyads. The context characterization is
 * reset to an unscaled copy and re-transformed with the new scaler.
 *
 * @param scaler the (pre-fit) scaler to use
 */
public void setScaler(final AbstractDyadScaler scaler) {
	if (!this.useScaler) {
		this.logger.trace("Now using scaler {}.", scaler.getClass());
		this.useScaler = true;
	} else {
		this.logger.trace("Update scaler. Was {} now is {}", this.scaler.getClass(), scaler.getClass());
	}
	this.scaler = scaler;
	// restore the original (unscaled) characterization as a fresh copy before re-scaling
	this.contextCharacterization = this.originalContextCharacterization.addConstantToCopy(0);
	this.transformContextCharacterization();
}
/**
 * Applies the current scaler to the context characterization by wrapping it
 * into a one-dyad, one-instance dataset (the form the scaler API expects).
 */
private void transformContextCharacterization() {
	this.logger.trace("Transform context characterization with scaler {}", this.scaler.getClass());
	// the dyad pairs the characterization with itself; only the context part is scaled
	Dyad selfDyad = new Dyad(this.contextCharacterization, this.contextCharacterization);
	DyadRankingDataset wrapper = new DyadRankingDataset();
	wrapper.add(new DenseDyadRankingInstance(Arrays.asList(selfDyad)));
	this.scaler.transformInstances(wrapper);
}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/search/ADyadRankedNodeQueueConfig.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.search;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ai.libs.jaicore.ml.ranking.dyad.learner.algorithm.IDyadRanker;
import ai.libs.jaicore.ml.ranking.dyad.learner.algorithm.PLNetDyadRanker;
import ai.libs.jaicore.ml.ranking.dyad.learner.util.AbstractDyadScaler;
import ai.libs.jaicore.ml.ranking.dyad.learner.util.DyadMinMaxScaler;
import ai.libs.jaicore.search.algorithms.standard.bestfirst.IBestFirstQueueConfiguration;
import ai.libs.jaicore.search.probleminputs.GraphSearchWithSubpathEvaluationsInput;
/**
* A configuration for a dyad ranked node queue. Mainly configures the dyad
* ranker.
*
* @author Helena Graf
*
* @param <N>
*/
public abstract class ADyadRankedNodeQueueConfig<N> implements IBestFirstQueueConfiguration<GraphSearchWithSubpathEvaluationsInput<N, String, Double>, N, String, Double> {

	private Logger logger = LoggerFactory.getLogger(ADyadRankedNodeQueueConfig.class);

	/**
	 * the ranker used to rank dyads consisting of pipeline metafeatures and dataset
	 * metafeatures
	 */
	protected IDyadRanker ranker;

	/**
	 * for scaling the dyads
	 */
	protected AbstractDyadScaler scaler;

	/**
	 * Construct a new dyad ranking node queue configuration, loading the default
	 * pre-trained min-max scaler and PL-Net dyad ranker from disk.
	 *
	 * @throws IOException if the files for the default ranker and scaler are invalid or cannot be found
	 * @throws ClassNotFoundException if the default scaler or ranker cannot be instantiated
	 */
	public ADyadRankedNodeQueueConfig() throws IOException, ClassNotFoundException {
		this.logger.trace("Load MinMaxScaler");
		// try-with-resources owns both streams; the previous version leaked the
		// FileInputStream when the ObjectInputStream constructor threw, and also
		// called fis.close() redundantly inside the resource block
		try (ObjectInputStream ois = new ObjectInputStream(new FileInputStream(new File("resources/draco/partial_pipeline_ranking/models/minmax_2500.ser")))) {
			this.scaler = (DyadMinMaxScaler) ois.readObject();
		}
		this.logger.trace("Load PL-Net Dyad Ranker");
		PLNetDyadRanker plranker = new PLNetDyadRanker();
		plranker.loadModelFromFile("resources/draco/partial_pipeline_ranking/models/ranker_2500.zip");
		this.ranker = plranker;
	}

	/**
	 * Get the ranker used to rank the OPEN list.
	 *
	 * @return the used ranker
	 */
	public IDyadRanker getRanker() {
		return this.ranker;
	}

	/**
	 * Set the ranker used to rank the OPEN list.
	 *
	 * @param ranker the used ranker
	 */
	public void setRanker(final IDyadRanker ranker) {
		this.ranker = ranker;
	}

	/**
	 * Get the scaler used to scale the dataset. Is pre-fit.
	 *
	 * @return the used scaler
	 */
	public AbstractDyadScaler getScaler() {
		return this.scaler;
	}

	/**
	 * Set the scaler used to scale the dataset. Must be pre-fit.
	 *
	 * @param scaler the used scaler
	 */
	public void setScaler(final AbstractDyadScaler scaler) {
		this.scaler = scaler;
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/search/RandomlyRankedNodeQueue.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.search;
import java.util.LinkedList;
import java.util.Random;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ai.libs.jaicore.search.model.travesaltree.BackPointerPath;
/**
* A node queue for the best first search that inserts new nodes at a random
* position in the list.
*
* @author Helena Graf
*
* @param <N>
* @param <V>
*/
@SuppressWarnings("serial")
public class RandomlyRankedNodeQueue<N, A, V extends Comparable<V>> extends LinkedList<BackPointerPath<N, A, V>> {

	/** source of randomness for choosing insertion positions */
	private Random random;

	/** transient so the queue stays serializable despite the non-serializable logger */
	private transient Logger logger = LoggerFactory.getLogger(RandomlyRankedNodeQueue.class);

	public RandomlyRankedNodeQueue(final int seed) {
		this.random = new Random(seed);
	}

	/**
	 * Adds an element at a uniformly random position within the queue.
	 *
	 * @param e the node to insert
	 * @return always true, as the queue is unbounded
	 */
	@Override
	public boolean add(final BackPointerPath<N, A, V> e) {
		// size() + 1 makes insertion at the very end possible as well
		int position = this.random.nextInt(this.size() + 1);
		this.logger.debug("Add node at random position {} to OPEN list of size {}.", position, this.size());
		super.add(position, e);
		return true;
	}

	/**
	 * Not supported: positions are chosen randomly by this queue.
	 *
	 * @throws UnsupportedOperationException always
	 */
	@Override
	public void add(final int position, final BackPointerPath<N, A, V> e) {
		// message typo fixed: "wihtin" -> "within"
		throw new UnsupportedOperationException("Cannot place items at a specific position within a randomly ranked queue!");
	}

	@Override
	public int hashCode() {
		final int prime = 31;
		int result = super.hashCode();
		result = prime * result + ((this.random == null) ? 0 : this.random.hashCode());
		return result;
	}

	@Override
	public boolean equals(final Object obj) {
		if (this == obj) {
			return true;
		}
		if (!super.equals(obj)) {
			return false;
		}
		if (this.getClass() != obj.getClass()) {
			return false;
		}
		@SuppressWarnings("rawtypes")
		RandomlyRankedNodeQueue other = (RandomlyRankedNodeQueue) obj;
		if (this.random == null) {
			if (other.random != null) {
				return false;
			}
		} else if (!this.random.equals(other.random)) {
			return false;
		}
		return true;
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/search/RandomlyRankedNodeQueueConfig.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.search;
import java.io.IOException;
import ai.libs.jaicore.search.algorithms.standard.bestfirst.BestFirst;
import ai.libs.jaicore.search.probleminputs.GraphSearchWithSubpathEvaluationsInput;
/**
* Configuration for a {@link RandomlyRankedNodeQueue}
*
* @author Helena Graf
*
* @param <T>
*/
public class RandomlyRankedNodeQueueConfig<T> extends ADyadRankedNodeQueueConfig<T> {

	/**
	 * random seed for randomizing the insertion of pipelines
	 */
	private final int seed;

	/**
	 * Construct a new config with the given seed.
	 *
	 * @param seed the seed to use
	 * @throws IOException if the default ranker or scaler files cannot be read
	 * @throws ClassNotFoundException if the default ranker or scaler cannot be instantiated
	 */
	public RandomlyRankedNodeQueueConfig(final int seed) throws IOException, ClassNotFoundException {
		super();
		this.seed = seed;
	}

	@Override
	public void configureBestFirst(final BestFirst<GraphSearchWithSubpathEvaluationsInput<T, String, Double>, T, String, Double> bestFirst) {
		// replace the default OPEN list by a randomly ordered one
		bestFirst.setOpen(new RandomlyRankedNodeQueue<>(this.seed));
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/util/AbstractDyadScaler.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.util;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyad;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingDataset;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingInstance;
import org.api4.java.common.math.IVector;
import ai.libs.jaicore.ml.ranking.dyad.dataset.DenseDyadRankingInstance;
import ai.libs.jaicore.ml.ranking.dyad.dataset.DyadRankingDataset;
import ai.libs.jaicore.ml.ranking.dyad.dataset.SparseDyadRankingInstance;
/**
* A scaler that can be fit to a certain dataset and then be used to standardize
* datasets, i.e. transform the data to have a mean of 0 and a standard
* deviation of 1 according to the data it was fit to.
*
* @author Michael Braun, Jonas Hanselle, Mirko Jürgens, Helena Graf
*
*/
public abstract class AbstractDyadScaler implements Serializable {

	private static final long serialVersionUID = -825893010030419116L;

	// per-feature summary statistics of the context ("instance") features; filled by fit()
	protected SummaryStatistics[] statsX;
	// per-feature summary statistics of the alternative features; filled by fit()
	protected SummaryStatistics[] statsY;

	public SummaryStatistics[] getStatsX() {
		return this.statsX;
	}

	public SummaryStatistics[] getStatsY() {
		return this.statsY;
	}

	/**
	 * Fits the scaler to the dataset, i.e. collects per-feature summary statistics
	 * for both the context and the alternative features over all dyads.
	 *
	 * @param dataset The dataset the scaler should be fit to.
	 */
	public void fit(final IDyadRankingDataset dataset) {
		// feature dimensions are taken from the first dyad of the first instance;
		// all dyads are assumed to share these dimensions
		int lengthX = dataset.get(0).getLabel().get(0).getContext().length();
		int lengthY = dataset.get(0).getLabel().get(0).getAlternative().length();
		this.statsX = new SummaryStatistics[lengthX];
		this.statsY = new SummaryStatistics[lengthY];
		for (int i = 0; i < lengthX; i++) {
			this.statsX[i] = new SummaryStatistics();
		}
		for (int i = 0; i < lengthY; i++) {
			this.statsY[i] = new SummaryStatistics();
		}
		for (IDyadRankingInstance instance : dataset) {
			for (IDyad dyad : instance) {
				for (int i = 0; i < lengthX; i++) {
					this.statsX[i].addValue(dyad.getContext().getValue(i));
				}
				for (int i = 0; i < lengthY; i++) {
					this.statsY[i].addValue(dyad.getAlternative().getValue(i));
				}
			}
		}
	}

	/**
	 * Transforms the entire dataset (both contexts and alternatives) according to
	 * the statistics of the data the scaler has been fit to.
	 *
	 * @param dataset The dataset to be standardized.
	 * @throws IllegalArgumentException if the dataset's feature dimensions differ
	 *             from those the scaler was fit to
	 */
	public void transform(final IDyadRankingDataset dataset) {
		int lengthX = dataset.get(0).getLabel().get(0).getContext().length();
		int lengthY = dataset.get(0).getLabel().get(0).getAlternative().length();
		if (lengthX != this.statsX.length || lengthY != this.statsY.length) {
			throw new IllegalArgumentException("The scaler was fit to dyads with instances of length " + this.statsX.length + " and alternatives of length " + this.statsY.length + "\n but received instances of length " + lengthX
					+ " and alternatives of length " + lengthY);
		}
		this.transformInstances(dataset);
		this.transformAlternatives(dataset);
	}

	/**
	 * Transforms only the contexts ("instances") of each dyad according to the
	 * statistics of the data the scaler has been fit to. No indices are ignored.
	 *
	 * @param dataset The dataset of which the instances are to be standardized.
	 */
	public void transformInstances(final IDyadRankingDataset dataset) {
		this.transformInstances(dataset, new ArrayList<>());
	}

	/**
	 * Transforms only the alternatives of each dyad according to the statistics
	 * of the data the scaler has been fit to. No indices are ignored.
	 *
	 * @param dataset The dataset of which the alternatives are to be standardized.
	 */
	public void transformAlternatives(final IDyadRankingDataset dataset) {
		this.transformAlternatives(dataset, new ArrayList<>());
	}

	/**
	 * Transforms only the context of the given dyad according to the statistics
	 * of the data the scaler has been fit to. The attributes with indices
	 * contained in ignoredIndices are not transformed.
	 *
	 * @param dyad The dyad whose context is to be standardized.
	 * @param ignoredIndices The {@link List} of indices that are ignored by
	 *            the scaler.
	 */
	public abstract void transformInstances(IDyad dyad, List<Integer> ignoredIndices);

	/**
	 * Transforms only the alternative of the given dyad according to the
	 * statistics of the data the scaler has been fit to. The attributes with
	 * indices contained in ignoredIndices are not transformed.
	 *
	 * @param dyad The dyad whose alternative is to be standardized.
	 * @param ignoredIndices The {@link List} of indices that are ignored by
	 *            the scaler.
	 */
	public abstract void transformAlternatives(IDyad dyad, List<Integer> ignoredIndices);

	/**
	 * Transforms a single context ("instance") feature vector in place.
	 * (Note: method name keeps the historical "Instace" spelling, as renaming
	 * would break subclasses and callers.)
	 *
	 * @param vector the vector to be transformed
	 * @param ignoredIndices The {@link List} of indices that are ignored by
	 *            the scaler.
	 */
	public abstract void transformInstaceVector(IVector vector, List<Integer> ignoredIndices);

	/**
	 * Transforms the shared context of a {@link SparseDyadRankingInstance}
	 * according to the statistics of the data the scaler has been fit to. The
	 * attributes with indices contained in ignoredIndices are not transformed.
	 *
	 * @param drInstance The sparse instance whose context is to be standardized.
	 * @param ignoredIndices The {@link List} of indices that are ignored by
	 *            the scaler.
	 */
	public void transformInstances(final SparseDyadRankingInstance drInstance, final List<Integer> ignoredIndices) {
		// a sparse instance has a single shared context, so only that one vector is touched
		this.transformInstaceVector(drInstance.getContext(), ignoredIndices);
	}

	/**
	 * Transforms the contexts of each dyad in an {@link IDyadRankingInstance}
	 * according to the statistics of the data the scaler has been fit to. The
	 * attributes with indices contained in ignoredIndices are not transformed.
	 *
	 * @param drInstance The instance whose dyad contexts are to be standardized.
	 * @param ignoredIndices The {@link List} of indices that are ignored by
	 *            the scaler.
	 */
	public void transformInstances(final IDyadRankingInstance drInstance, final List<Integer> ignoredIndices) {
		for (IDyad dyad : drInstance) {
			this.transformInstances(dyad, ignoredIndices);
		}
	}

	/**
	 * Transforms the alternatives of each dyad in an {@link IDyadRankingInstance}
	 * according to the statistics of the data the scaler has been fit to. The
	 * attributes with indices contained in ignoredIndices are not transformed.
	 *
	 * @param drInstance The instance whose dyad alternatives are to be standardized.
	 * @param ignoredIndices The {@link List} of indices that are ignored by
	 *            the scaler.
	 */
	public void transformAlternatives(final IDyadRankingInstance drInstance, final List<Integer> ignoredIndices) {
		for (IDyad dyad : drInstance) {
			this.transformAlternatives(dyad, ignoredIndices);
		}
	}

	/**
	 * Transforms the contexts of each dyad in a {@link DyadRankingDataset}
	 * according to the statistics of the data the scaler has been fit to. The
	 * attributes with indices contained in ignoredIndices are not transformed.
	 *
	 * @param dataset The dataset whose dyad contexts are to be standardized.
	 * @param ignoredIndices The {@link List} of indices that are ignored by
	 *            the scaler.
	 * @throws IllegalArgumentException if an instance is neither sparse nor dense
	 */
	public void transformInstances(final IDyadRankingDataset dataset, final List<Integer> ignoredIndices) {
		for (IDyadRankingInstance instance : dataset) {
			// dispatch on the concrete instance type: sparse instances share one context
			if (instance instanceof SparseDyadRankingInstance) {
				SparseDyadRankingInstance drSparseInstance = (SparseDyadRankingInstance) instance;
				this.transformInstances(drSparseInstance, ignoredIndices);
			} else if (instance instanceof DenseDyadRankingInstance) {
				DenseDyadRankingInstance drDenseInstance = (DenseDyadRankingInstance) instance;
				this.transformInstances(drDenseInstance, ignoredIndices);
			} else {
				throw new IllegalArgumentException("The scalers only support SparseDyadRankingInstance and DyadRankingInstance!");
			}
		}
	}

	/**
	 * Transforms the alternatives of each dyad in a {@link DyadRankingDataset}
	 * according to the statistics of the data the scaler has been fit to. The
	 * attributes with indices contained in ignoredIndices are not transformed.
	 *
	 * @param dataset The dataset whose dyad alternatives are to be standardized.
	 * @param ignoredIndices The {@link List} of indices that are ignored by
	 *            the scaler.
	 */
	public void transformAlternatives(final IDyadRankingDataset dataset, final List<Integer> ignoredIndices) {
		for (IDyadRankingInstance instance : dataset) {
			this.transformAlternatives(instance, ignoredIndices);
		}
	}

	/**
	 * Fits the scaler to the dataset and then transforms the entire dataset
	 * according to the collected statistics.
	 *
	 * @param dataset The dataset to be standardized.
	 */
	public void fitTransform(final IDyadRankingDataset dataset) {
		this.fit(dataset);
		this.transform(dataset);
	}

	/**
	 * Returns a human-readable string with the standard deviations of all
	 * features this scaler has been fit to.
	 *
	 * @throws IllegalStateException if the scaler has not been fit yet
	 */
	public String getPrettySTDString() {
		if (this.statsX == null || this.statsY == null) {
			throw new IllegalStateException("The scaler must be fit before calling this method!");
		}
		StringBuilder builder = new StringBuilder();
		builder.append("Standard deviations for instances: ");
		for (SummaryStatistics stats : this.statsX) {
			builder.append(stats.getStandardDeviation());
			builder.append(", ");
		}
		builder.append(System.lineSeparator());
		builder.append("Standard deviations for alternatives: ");
		for (SummaryStatistics stats : this.statsY) {
			builder.append(stats.getStandardDeviation());
			builder.append(", ");
		}
		builder.append(System.lineSeparator());
		return builder.toString();
	}

	/**
	 * Returns a human-readable string with the means of all features this scaler
	 * has been fit to.
	 *
	 * @throws IllegalStateException if the scaler has not been fit yet
	 */
	public String getPrettyMeansString() {
		if (this.statsX == null || this.statsY == null) {
			throw new IllegalStateException("The scaler must be fit before calling this method!");
		}
		StringBuilder builder = new StringBuilder();
		builder.append("Means for instances: ");
		for (SummaryStatistics stats : this.statsX) {
			builder.append(stats.getMean());
			builder.append(", ");
		}
		builder.append(System.lineSeparator());
		builder.append("Means for alternatives: ");
		for (SummaryStatistics stats : this.statsY) {
			builder.append(stats.getMean());
			builder.append(", ");
		}
		builder.append(System.lineSeparator());
		return builder.toString();
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/util/DyadMinMaxScaler.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.util;
import java.text.DecimalFormat;
import java.util.List;
import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyad;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingInstance;
import org.api4.java.common.math.IVector;
import ai.libs.jaicore.ml.ranking.dyad.dataset.DyadRankingDataset;
/**
* A scaler that can be fit to a certain dataset and then be used to normalize
* dyad datasets, i.e. transform the data such that the values of each feature
* lie between 0 and 1.
*
* For feature x: x = x - x_min / (x_max - x_min)
*
* @author Michael Braun, Mirko Jürgens, Helena Graf
*
*/
public class DyadMinMaxScaler extends AbstractDyadScaler {

	private static final long serialVersionUID = -1319262573945961139L;

	/**
	 * Undoes the min-max transformation on the whole dataset (both contexts and
	 * alternatives).
	 *
	 * @param dataset the dataset to un-scale in place
	 * @throws IllegalArgumentException if the dataset's feature dimensions differ
	 *             from those the scaler was fit to
	 */
	public void untransform(final DyadRankingDataset dataset) {
		int lengthX = dataset.get(0).getLabel().get(0).getContext().length();
		int lengthY = dataset.get(0).getLabel().get(0).getAlternative().length();
		if (lengthX != this.statsX.length || lengthY != this.statsY.length) {
			throw new IllegalArgumentException("The scaler was fit to dyads with instances of length " + this.statsX.length + " and alternatives of length " + this.statsY.length + "\n but received instances of length " + lengthX
					+ " and alternatives of length " + lengthY);
		}
		this.untransformInstances(dataset);
		this.untransformAlternatives(dataset);
	}

	/**
	 * Undoes the transformation of the contexts of each dyad.
	 *
	 * @param dataset the dataset to un-scale in place
	 */
	public void untransformInstances(final DyadRankingDataset dataset) {
		for (IDyadRankingInstance instance : dataset) {
			for (IDyad dyad : instance) {
				this.untransformInstance(dyad);
			}
		}
	}

	/**
	 * Undoes the transformation of the contexts of each dyad, rounding results.
	 *
	 * @param dataset the dataset to un-scale in place
	 * @param decimals number of decimal places for rounding
	 */
	public void untransformInstances(final DyadRankingDataset dataset, final int decimals) {
		for (IDyadRankingInstance instance : dataset) {
			for (IDyad dyad : instance) {
				this.untransformInstance(dyad, decimals);
			}
		}
	}

	/**
	 * Undoes the transformation of the context of a single dyad.
	 *
	 * @param dyad the dyad whose context is un-scaled in place
	 * @throws IllegalArgumentException if the context length differs from the fit data
	 */
	public void untransformInstance(final IDyad dyad) {
		int lengthX = dyad.getContext().length();
		if (lengthX != this.statsX.length) {
			throw new IllegalArgumentException("The scaler was fit to instances of length " + this.statsX.length + " but received an instance of length " + lengthX + ".");
		}
		for (int i = 0; i < lengthX; i++) {
			// inverse of x' = (x - min) / (max - min)
			double value = dyad.getContext().getValue(i);
			value *= this.statsX[i].getMax() - this.statsX[i].getMin();
			value += this.statsX[i].getMin();
			dyad.getContext().setValue(i, value);
		}
	}

	/**
	 * Undoes the transformation of the context of a single dyad, rounding the
	 * restored values to the given number of decimal places.
	 *
	 * @param dyad the dyad whose context is un-scaled in place
	 * @param decimals number of decimal places for rounding
	 * @throws IllegalArgumentException if the context length differs from the fit data
	 */
	public void untransformInstance(final IDyad dyad, final int decimals) {
		StringBuilder pattern = new StringBuilder();
		pattern.append("#.");
		for (int i = 0; i < decimals; i++) {
			pattern.append("#");
		}
		int lengthX = dyad.getContext().length();
		if (lengthX != this.statsX.length) {
			throw new IllegalArgumentException("The scaler was fit to instances of length " + this.statsX.length + " but received an instance of length " + lengthX + ".");
		}
		DecimalFormat df = new DecimalFormat(pattern.toString());
		for (int i = 0; i < lengthX; i++) {
			double value = dyad.getContext().getValue(i);
			value *= this.statsX[i].getMax() - this.statsX[i].getMin();
			value += this.statsX[i].getMin();
			dyad.getContext().setValue(i, Double.valueOf(df.format(value)));
		}
	}

	/**
	 * Undoes the transformation of the alternatives of each dyad.
	 *
	 * @param dataset the dataset to un-scale in place
	 */
	public void untransformAlternatives(final DyadRankingDataset dataset) {
		for (IDyadRankingInstance instance : dataset) {
			for (IDyad dyad : instance) {
				this.untransformAlternative(dyad);
			}
		}
	}

	/**
	 * Undoes the transformation of the alternatives of each dyad, rounding results.
	 *
	 * @param dataset the dataset to un-scale in place
	 * @param decimals number of decimal places for rounding
	 */
	public void untransformAlternatives(final DyadRankingDataset dataset, final int decimals) {
		for (IDyadRankingInstance instance : dataset) {
			for (IDyad dyad : instance) {
				this.untransformAlternative(dyad, decimals);
			}
		}
	}

	/**
	 * Undoes the transformation on the alternative of a single dyad.
	 *
	 * @param dyad the dyad whose alternative is un-scaled in place
	 * @throws IllegalArgumentException if the alternative length differs from the fit data
	 */
	public void untransformAlternative(final IDyad dyad) {
		int lengthY = dyad.getAlternative().length();
		if (lengthY != this.statsY.length) {
			throw new IllegalArgumentException("The scaler was fit to alternatives of length " + this.statsY.length + " but received an alternative of length " + lengthY + ".");
		}
		for (int i = 0; i < lengthY; i++) {
			// inverse of y' = (y - min) / (max - min)
			double value = dyad.getAlternative().getValue(i);
			value *= this.statsY[i].getMax() - this.statsY[i].getMin();
			value += this.statsY[i].getMin();
			dyad.getAlternative().setValue(i, value);
		}
	}

	/**
	 * Undoes the transformation on the alternative of a single dyad, rounding
	 * the restored values to the given number of decimal places.
	 *
	 * @param dyad the dyad whose alternative is un-scaled in place
	 * @param decimals number of decimal places for rounding
	 * @throws IllegalArgumentException if the alternative length differs from the fit data
	 */
	public void untransformAlternative(final IDyad dyad, final int decimals) {
		StringBuilder pattern = new StringBuilder();
		pattern.append("#.");
		for (int i = 0; i < decimals; i++) {
			pattern.append("#");
		}
		int lengthY = dyad.getAlternative().length();
		if (lengthY != this.statsY.length) {
			throw new IllegalArgumentException("The scaler was fit to alternatives of length " + this.statsY.length + " but received an alternative of length " + lengthY + ".");
		}
		DecimalFormat df = new DecimalFormat(pattern.toString());
		for (int i = 0; i < lengthY; i++) {
			double value = dyad.getAlternative().getValue(i);
			value *= this.statsY[i].getMax() - this.statsY[i].getMin();
			value += this.statsY[i].getMin();
			dyad.getAlternative().setValue(i, Double.valueOf(df.format(value)));
		}
	}

	/**
	 * Returns a String with the maxima of all features this scaler has been fit to.
	 *
	 * @throws IllegalStateException if the scaler has not been fit yet
	 */
	public String getPrettyMaximaString() {
		if (this.statsX == null || this.statsY == null) {
			throw new IllegalStateException("The scaler must be fit before calling this method!");
		}
		StringBuilder builder = new StringBuilder();
		// labels fixed: this method reports maxima, not standard deviations
		builder.append("Maxima for instances: ");
		for (SummaryStatistics stats : this.statsX) {
			builder.append(stats.getMax());
			builder.append(", ");
		}
		builder.append(System.lineSeparator());
		builder.append("Maxima for alternatives: ");
		for (SummaryStatistics stats : this.statsY) {
			builder.append(stats.getMax());
			builder.append(", ");
		}
		builder.append(System.lineSeparator());
		return builder.toString();
	}

	/**
	 * Returns a String with the minima of all features this scaler has been fit to.
	 *
	 * @throws IllegalStateException if the scaler has not been fit yet
	 */
	public String getPrettyMinimaString() {
		if (this.statsX == null || this.statsY == null) {
			throw new IllegalStateException("The scaler must be fit before calling this method!");
		}
		StringBuilder builder = new StringBuilder();
		// labels fixed: this method reports minima, not means
		builder.append("Minima for instances: ");
		for (SummaryStatistics stats : this.statsX) {
			builder.append(stats.getMin());
			builder.append(", ");
		}
		builder.append(System.lineSeparator());
		builder.append("Minima for alternatives: ");
		for (SummaryStatistics stats : this.statsY) {
			builder.append(stats.getMin());
			builder.append(", ");
		}
		builder.append(System.lineSeparator());
		return builder.toString();
	}

	@Override
	public void transformInstances(final IDyad dyad, final List<Integer> ignoredIndices) {
		for (int i = 0; i < dyad.getContext().length(); i++) {
			// BUGFIX: honor ignoredIndices; previously this method silently ignored the
			// parameter, unlike transformAlternatives and all DyadStandardScaler overrides
			if (!ignoredIndices.contains(i)) {
				double value = dyad.getContext().getValue(i);
				value -= this.statsX[i].getMin();
				// prevent division by zero
				if ((this.statsX[i].getMax() - this.statsX[i].getMin()) != 0) {
					value /= this.statsX[i].getMax() - this.statsX[i].getMin();
				}
				dyad.getContext().setValue(i, value);
			}
		}
	}

	@Override
	public void transformAlternatives(final IDyad dyad, final List<Integer> ignoredIndices) {
		for (int i = 0; i < dyad.getAlternative().length(); i++) {
			if (!ignoredIndices.contains(i)) {
				double value = dyad.getAlternative().getValue(i);
				value -= this.statsY[i].getMin();
				// prevent division by zero
				if ((this.statsY[i].getMax() - this.statsY[i].getMin()) != 0) {
					value /= this.statsY[i].getMax() - this.statsY[i].getMin();
				}
				dyad.getAlternative().setValue(i, value);
			}
		}
	}

	@Override
	public void transformInstaceVector(final IVector vector, final List<Integer> ignoredIndices) {
		for (int i = 0; i < vector.length(); i++) {
			// BUGFIX: honor ignoredIndices (consistent with DyadStandardScaler)
			if (!ignoredIndices.contains(i)) {
				double value = vector.getValue(i);
				value -= this.statsX[i].getMin();
				// prevent division by zero
				if ((this.statsX[i].getMax() - this.statsX[i].getMin()) != 0) {
					value /= this.statsX[i].getMax() - this.statsX[i].getMin();
				}
				vector.setValue(i, value);
			}
		}
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/util/DyadStandardScaler.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.util;
import java.util.List;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyad;
import org.api4.java.common.math.IVector;
/**
* A scaler that can be fit to a certain dataset and then be used to standardize
* datasets, i.e. transform the data to have a mean of 0 and a standard
* deviation of 1 according to the data it was fit to.
*
* @author Michael Braun, Jonas Hanselle, Mirko Jürgens
*
*/
public class DyadStandardScaler extends AbstractDyadScaler {

	private static final long serialVersionUID = 1L;

	/**
	 * Standardizes the context features of the given dyad in place: subtracts the
	 * fitted mean and, for non-constant features, divides by the fitted standard
	 * deviation. Indices listed in ignoredIndices are left untouched.
	 */
	@Override
	public void transformInstances(final IDyad dyad, final List<Integer> ignoredIndices) {
		final int featureCount = dyad.getContext().length();
		for (int i = 0; i < featureCount; i++) {
			if (ignoredIndices.contains(i)) {
				continue;
			}
			double shifted = dyad.getContext().getValue(i) - this.statsX[i].getMean();
			double sd = this.statsX[i].getStandardDeviation();
			// constant features (sd == 0) are only mean-centered to avoid division by zero
			dyad.getContext().setValue(i, sd != 0 ? shifted / sd : shifted);
		}
	}

	/**
	 * Standardizes the alternative features of the given dyad in place, skipping
	 * all indices listed in ignoredIndices.
	 */
	@Override
	public void transformAlternatives(final IDyad dyad, final List<Integer> ignoredIndices) {
		final int featureCount = dyad.getAlternative().length();
		for (int i = 0; i < featureCount; i++) {
			if (ignoredIndices.contains(i)) {
				continue;
			}
			double shifted = dyad.getAlternative().getValue(i) - this.statsY[i].getMean();
			double sd = this.statsY[i].getStandardDeviation();
			// constant features (sd == 0) are only mean-centered to avoid division by zero
			dyad.getAlternative().setValue(i, sd != 0 ? shifted / sd : shifted);
		}
	}

	/**
	 * Standardizes a single context feature vector in place using the context
	 * statistics, skipping all indices listed in ignoredIndices.
	 */
	@Override
	public void transformInstaceVector(final IVector vector, final List<Integer> ignoredIndices) {
		final int featureCount = vector.length();
		for (int i = 0; i < featureCount; i++) {
			if (ignoredIndices.contains(i)) {
				continue;
			}
			double shifted = vector.getValue(i) - this.statsX[i].getMean();
			double sd = this.statsX[i].getStandardDeviation();
			// constant features (sd == 0) are only mean-centered to avoid division by zero
			vector.setValue(i, sd != 0 ? shifted / sd : shifted);
		}
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/util/DyadUnitIntervalScaler.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.util;
import java.util.List;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyad;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingDataset;
import org.api4.java.ai.ml.ranking.dyad.dataset.IDyadRankingInstance;
import org.api4.java.common.math.IVector;
/**
* A scaler that can be fit to a certain dataset and then be used to normalize
* datasets, i.e. transform the data to have a length of 1.
*
* @author Mirko Jürgens
*
*/
public class DyadUnitIntervalScaler extends AbstractDyadScaler {

	private static final long serialVersionUID = -6732663643697649308L;

	/** per-feature Euclidean column norm of the context features, derived in fit() */
	private double[] lengthOfX;
	/** per-feature Euclidean column norm of the alternative features, derived in fit() */
	private double[] lengthOfY;

	/**
	 * Fits the scaler: collects the base statistics and derives the Euclidean
	 * norm of every feature column (sqrt of the sum of squares).
	 */
	@Override
	public void fit(final IDyadRankingDataset dataset) {
		super.fit(dataset);
		final int contextDim = dataset.get(0).getLabel().get(0).getContext().length();
		this.lengthOfX = new double[contextDim];
		for (int feature = 0; feature < contextDim; feature++) {
			this.lengthOfX[feature] = Math.sqrt(this.statsX[feature].getSumsq());
		}
		final int alternativeDim = dataset.get(0).getLabel().get(0).getAlternative().length();
		this.lengthOfY = new double[alternativeDim];
		for (int feature = 0; feature < alternativeDim; feature++) {
			this.lengthOfY[feature] = Math.sqrt(this.statsY[feature].getSumsq());
		}
	}

	/**
	 * Divides every context feature of every dyad by its column norm; zero
	 * entries are left as-is (also guards against zero-norm columns).
	 */
	@Override
	public void transformInstances(final IDyadRankingDataset dataset, final List<Integer> ignoredIndices) {
		final int contextDim = dataset.get(0).getLabel().get(0).getContext().length();
		for (IDyadRankingInstance rankingInstance : dataset) {
			for (IDyad dyad : rankingInstance) {
				for (int feature = 0; feature < contextDim; feature++) {
					double entry = dyad.getContext().getValue(feature);
					if (entry != 0.0d) {
						entry /= this.lengthOfX[feature];
					}
					dyad.getContext().setValue(feature, entry);
				}
			}
		}
	}

	/**
	 * Divides every alternative feature of every dyad by its column norm; zero
	 * entries are left as-is (also guards against zero-norm columns).
	 */
	@Override
	public void transformAlternatives(final IDyadRankingDataset dataset, final List<Integer> ignoredIndices) {
		final int alternativeDim = dataset.get(0).getLabel().get(0).getAlternative().length();
		for (IDyadRankingInstance rankingInstance : dataset) {
			for (IDyad dyad : rankingInstance) {
				for (int feature = 0; feature < alternativeDim; feature++) {
					double entry = dyad.getAlternative().getValue(feature);
					if (entry != 0.0d) {
						entry /= this.lengthOfY[feature];
					}
					dyad.getAlternative().setValue(feature, entry);
				}
			}
		}
	}

	/**
	 * Normalizes a single context vector using the fitted context statistics,
	 * skipping indices listed in ignoredIndices.
	 */
	@Override
	public void transformInstaceVector(final IVector vector, final List<Integer> ignoredIndices) {
		for (int feature = 0; feature < vector.length(); feature++) {
			if (ignoredIndices.contains(feature)) {
				continue;
			}
			double entry = vector.getValue(feature);
			if (entry != 0.0d) {
				entry /= Math.sqrt(this.statsX[feature].getSumsq());
			}
			vector.setValue(feature, entry);
		}
	}

	@Override
	public void transformInstances(final IDyad dyad, final List<Integer> ignoredIndices) {
		throw new UnsupportedOperationException("Not yet implemented!");
	}

	@Override
	public void transformAlternatives(final IDyad dyad, final List<Integer> ignoredIndices) {
		throw new UnsupportedOperationException("Not yet implemented!");
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/zeroshot
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/zeroshot/inputoptimization/InputOptimizerLoss.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.zeroshot.inputoptimization;
import org.nd4j.linalg.api.ndarray.INDArray;
/**
 * A loss function over the output of a PLNet, used to drive input optimization.
 * Implementations provide both the loss value and its gradient with respect to
 * the network output.
 */
public interface InputOptimizerLoss {

	/**
	 * @param plNetOutput the output of the PLNet
	 * @return the loss value for this output
	 */
	public double loss(INDArray plNetOutput);

	/**
	 * @param plNetOutput the output of the PLNet
	 * @return the gradient of the loss with respect to the PLNet output
	 */
	public double lossGradient(INDArray plNetOutput);
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/zeroshot
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/zeroshot/inputoptimization/NegIdentityInpOptLoss.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.zeroshot.inputoptimization;
import org.nd4j.linalg.api.ndarray.INDArray;
/**
* Loss function for PLNet input optimization that maximizes the output of a PLNet. (i.e. minimizes the negative output)
* @author Michael Braun
*
*/
public class NegIdentityInpOptLoss implements InputOptimizerLoss {

	/**
	 * @return the negated first entry of the PLNet output, so that minimizing this
	 *         loss maximizes the network output
	 */
	@Override
	public double loss(INDArray plNetOutput) {
		return - plNetOutput.getDouble(0);
	}

	/**
	 * @return the constant gradient -1.0 of the negated identity
	 */
	@Override
	public double lossGradient(INDArray plNetOutput) {
		return -1.0;
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/zeroshot
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/zeroshot/inputoptimization/PLNetInputOptimizer.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.zeroshot.inputoptimization;
import org.deeplearning4j.nn.gradient.Gradient;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.indexing.BooleanIndexing;
import org.nd4j.linalg.indexing.NDArrayIndex;
import org.nd4j.linalg.indexing.conditions.Conditions;
import org.nd4j.linalg.primitives.Pair;
import ai.libs.jaicore.ml.ranking.dyad.learner.algorithm.PLNetDyadRanker;
import ai.libs.jaicore.ml.ranking.dyad.learner.util.DyadMinMaxScaler;
import ai.libs.jaicore.ml.ranking.dyad.learner.zeroshot.util.InputOptListener;
/**
* Optimizes a given loss function ({@link InputOptimizerLoss}) with respect to the input of a PLNet using gradient descent.
* Assumes the PLNet was trained on normalized training data (i.e. scaled to intervals of 0 to 1 using {@link DyadMinMaxScaler})
* and ensures that the optimized inputs will be within this range.
*
* @author Michael Braun
*
*/
public class PLNetInputOptimizer {

	/** Optional listener that records every optimization step; may be null (disabled). */
	private InputOptListener listener;

	/**
	 * Optimizes the given loss function with respect to a given PLNet's inputs using gradient descent. Ensures the outcome will be within the range of 0 and 1.
	 * Performs gradient descent for a given number of steps starting at a given input, using a static learning rate.
	 * The inputs that should be optimized can be specified using an index range in the form of a {@link Pair} of integers.
	 * @param plNet PLNet whose inputs to optimize.
	 * @param input Initial inputs to start the gradient descent procedure from.
	 * @param loss The loss to be minimized.
	 * @param learningRate The initial learning rate.
	 * @param numSteps The number of steps to perform gradient descent for.
	 * @param indexRange Pair of indices specifying the parts of the input that should be optimized.
	 *        NOTE(review): NDArrayIndex.interval is end-exclusive by default, so the second index
	 *        appears NOT to be included, contrary to an earlier claim of inclusivity -- confirm.
	 * @return The input optimized with respect to the given loss.
	 */
	public INDArray optimizeInput(PLNetDyadRanker plNet, INDArray input, InputOptimizerLoss loss, double learningRate, int numSteps, Pair<Integer, Integer> indexRange) {
		INDArray mask;
		if (indexRange != null) {
			// Build a 0/1 mask that is 1 exactly on the components to optimize.
			mask = Nd4j.zeros(input.length());
			mask.get(NDArrayIndex.interval(indexRange.getFirst(), indexRange.getSecond())).assign(1.0);
		} else {
			// No range given: optimize all components.
			mask = Nd4j.ones(input.length());
		}
		return optimizeInput(plNet, input, loss, learningRate, numSteps, mask);
	}

	/**
	 * Optimizes the given loss function with respect to a given PLNet's inputs using gradient descent. Ensures the outcome will be within the range of 0 and 1.
	 * Performs gradient descent for a given number of steps starting at a given input, using a linearly decaying learning rate.
	 * The inputs that should be optimized can be specified using an index range in the form of a {@link Pair} of integers.
	 * @param plNet PLNet whose inputs to optimize.
	 * @param input Initial inputs to start the gradient descent procedure from.
	 * @param loss The loss to be minimized.
	 * @param initialLearningRate The initial learning rate.
	 * @param finalLearningRate The value the learning rate should decay to.
	 * @param numSteps The number of steps to perform gradient descent for.
	 * @param indexRange Pair of indices specifying the parts of the input that should be optimized
	 *        (see the single-learning-rate overload for a note on end-exclusivity).
	 * @return The input optimized with respect to the given loss.
	 */
	public INDArray optimizeInput(PLNetDyadRanker plNet, INDArray input, InputOptimizerLoss loss, double initialLearningRate, double finalLearningRate, int numSteps, Pair<Integer, Integer> indexRange) {
		INDArray mask;
		if (indexRange != null) {
			mask = Nd4j.zeros(input.length());
			mask.get(NDArrayIndex.interval(indexRange.getFirst(), indexRange.getSecond())).assign(1.0);
		} else {
			mask = Nd4j.ones(input.length());
		}
		return optimizeInput(plNet, input, loss, initialLearningRate, finalLearningRate, numSteps, mask);
	}

	/**
	 * Optimizes the given loss function with respect to a given PLNet's inputs using gradient descent. Ensures the outcome will be within the range of 0 and 1.
	 * Performs gradient descent for a given number of steps starting at a given input, using a static learning rate.
	 * The inputs that should be optimized can be specified using a 0,1-vector.
	 * @param plNet PLNet whose inputs to optimize.
	 * @param input Initial inputs to start the gradient descent procedure from.
	 * @param loss The loss to be minimized.
	 * @param learningRate The initial learning rate.
	 * @param numSteps The number of steps to perform gradient descent for.
	 * @param inputMask 0,1 vector specifying the inputs to optimize, i.e. should have a 1 at the index of any input that should be optimized and a 0 elsewhere.
	 * @return The input optimized with respect to the given loss.
	 */
	public INDArray optimizeInput(PLNetDyadRanker plNet, INDArray input, InputOptimizerLoss loss, double learningRate, int numSteps, INDArray inputMask) {
		// A static learning rate is a decaying one whose initial and final values coincide.
		return optimizeInput(plNet, input, loss, learningRate, learningRate, numSteps, inputMask);
	}

	/**
	 * Optimizes the given loss function with respect to a given PLNet's inputs using gradient descent. Ensures the outcome will be within the range of 0 and 1.
	 * Performs gradient descent for a given number of steps starting at a given input, using a linearly decaying learning rate.
	 * The inputs that should be optimized can be specified using a 0,1-vector.
	 * @param plNet PLNet whose inputs to optimize.
	 * @param input Initial inputs to start the gradient descent procedure from.
	 * @param loss The loss to be minimized.
	 * @param initialLearningRate The initial learning rate.
	 * @param finalLearningRate The value the learning rate should decay to.
	 * @param numSteps The number of steps to perform gradient descent for.
	 * @param inputMask 0,1 vector specifying the inputs to optimize, i.e. should have a 1 at the index of any input that should be optimized and a 0 elsewhere.
	 * @return The input optimized with respect to the given loss.
	 */
	public INDArray optimizeInput(PLNetDyadRanker plNet, INDArray input, InputOptimizerLoss loss, double initialLearningRate, double finalLearningRate, int numSteps,
			INDArray inputMask) {
		INDArray inp = input.dup();
		// Multipliers of the KKT-style penalty terms for the box constraints inp >= 0 (alphas)
		// and inp <= 1 (betas).
		INDArray alphas = Nd4j.zeros(inp.shape());
		INDArray betas = Nd4j.zeros(inp.shape());
		INDArray ones = Nd4j.ones(inp.shape());
		double output = plNet.getPlNet().output(inp).getDouble(0);
		double incumbentOutput = output;
		INDArray incumbent = inp.dup();
		for (int i = 0; i < numSteps; i++) {
			// Linearly interpolate the learning rate from initial to final over the run.
			double lrDecayTerm = (double) i / (double) numSteps;
			double learningRate = (1 - lrDecayTerm) * initialLearningRate + lrDecayTerm * finalLearningRate;
			// Gradient of PLNet
			INDArray grad = computeInputDerivative(plNet, inp, loss);
			// Gradient of KKT term
			grad.subi(alphas);
			grad.addi(betas);
			// Apply gradient to alphas and betas
			alphas.subi(inp);
			betas.addi(inp.sub(ones));
			// Project the multipliers back to non-negative values.
			BooleanIndexing.replaceWhere(alphas, 0.0d, Conditions.lessThan(0.0d));
			BooleanIndexing.replaceWhere(betas, 0.0d, Conditions.lessThan(0.0d));
			// Restrict the update to the masked components, then take a descent step.
			grad.muli(inputMask);
			grad.muli(learningRate);
			inp.subi(grad);
			output = plNet.getPlNet().output(inp).getDouble(0);
			if (listener != null) {
				listener.reportOptimizationStep(inp, output);
			}
			// Track the best feasible input so far: all masked components must lie in [0, 1].
			// NOTE(review): the incumbent is selected by raw PLNet output (maximization), not by
			// the supplied loss -- confirm this is intended for losses other than NegIdentity.
			INDArray incCheck = inp.dup().muli(inputMask);
			if (output > incumbentOutput && BooleanIndexing.and(incCheck, Conditions.greaterThanOrEqual(0.0d)) && BooleanIndexing.and(incCheck, Conditions.lessThanOrEqual(1.0d))) {
				incumbent = inp.dup();
				incumbentOutput = output;
			}
		}
		return incumbent;
	}

	/**
	 * Computes the derivative of the loss with respect to the network input by
	 * backpropagating the loss gradient through the PLNet.
	 */
	private static INDArray computeInputDerivative(PLNetDyadRanker plNet, INDArray input, InputOptimizerLoss loss) {
		MultiLayerNetwork net = plNet.getPlNet();
		INDArray output = net.output(input);
		INDArray lossGradient = Nd4j.create(new double[] { loss.lossGradient(output) });
		net.setInput(input);
		net.feedForward(false, false);
		// backpropGradient returns (parameter gradients, epsilon); the second element is the
		// gradient with respect to the network input.
		Pair<Gradient, INDArray> p = net.backpropGradient(lossGradient, null);
		return p.getSecond();
	}

	/**
	 * Set an {@link InputOptListener} to record the intermediate steps of the optimization procedure.
	 * @param listener the listener to report steps to; null disables reporting
	 */
	public void setListener(InputOptListener listener) {
		this.listener = listener;
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/zeroshot
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/dyad/learner/zeroshot/util/InputOptListener.java
|
package ai.libs.jaicore.ml.ranking.dyad.learner.zeroshot.util;
import java.util.ArrayList;
import java.util.List;
import org.nd4j.linalg.api.ndarray.INDArray;
/**
 * Records the trajectory of a PLNet input optimization run: for every reported
 * step it stores the watched input components and the corresponding PLNet output.
 */
public class InputOptListener {

	/** Indices of the input components to record at each step. */
	private final int[] indicesToWatch;
	/** Watched input slices, one entry per reported optimization step. */
	private final List<INDArray> inputList = new ArrayList<>();
	/** PLNet outputs, one entry per reported optimization step. */
	private final List<Double> outputList = new ArrayList<>();

	/**
	 * @param indicesToWatch the column indices of the input to record
	 */
	public InputOptListener(final int[] indicesToWatch) {
		this.indicesToWatch = indicesToWatch;
	}

	/**
	 * Appends one optimization step to the recorded trajectory.
	 *
	 * @param plNetInput the current input of the PLNet
	 * @param plNetOutput the current output of the PLNet
	 */
	public void reportOptimizationStep(final INDArray plNetInput, final double plNetOutput) {
		this.inputList.add(plNetInput.getColumns(this.indicesToWatch));
		this.outputList.add(plNetOutput);
	}

	/** @return the recorded input slices, in step order */
	public List<INDArray> getInputList() {
		return this.inputList;
	}

	/** @return the recorded outputs, in step order */
	public List<Double> getOutputList() {
		return this.outputList;
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/filter/PairWisePreferenceToBinaryClassificationFilter.java
|
package ai.libs.jaicore.ml.ranking.filter;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance;
import org.api4.java.ai.ml.core.filter.FilterApplicationFailedException;
import org.api4.java.ai.ml.core.filter.unsupervised.IUnsupervisedFilter;
/**
 * Filter intended to turn pairwise preference data into a binary classification
 * problem over the two given labels. Both predict methods are not implemented yet.
 */
public class PairWisePreferenceToBinaryClassificationFilter implements IUnsupervisedFilter {

	// The two class labels of the resulting binary problem; presumably labelA/labelB
	// mark which alternative is preferred -- confirm once predict is implemented.
	private final Object labelA;
	private final Object labelB;

	public PairWisePreferenceToBinaryClassificationFilter(final Object labelA, final Object labelB) {
		this.labelA = labelA;
		this.labelB = labelB;
	}

	public Object getLabelA() {
		return this.labelA;
	}

	public Object getLabelB() {
		return this.labelB;
	}

	/** @throws UnsupportedOperationException always; not implemented yet */
	@Override
	public ILabeledDataset<ILabeledInstance> predict(final ILabeledDataset<ILabeledInstance> input) throws InterruptedException, FilterApplicationFailedException {
		throw new UnsupportedOperationException();
	}

	/** @throws UnsupportedOperationException always; not implemented yet */
	@Override
	public ILabeledDataset<ILabeledInstance> predict(final ILabeledInstance input) throws InterruptedException, FilterApplicationFailedException {
		throw new UnsupportedOperationException();
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/filter/package-info.java
|
package ai.libs.jaicore.ml.ranking.filter;
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/label/package-info.java
|
/**
* Label ranking package.
*
* @author mwever
*
*/
package ai.libs.jaicore.ml.ranking.label;
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/label
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/label/learner/IConfigurableLabelRanker.java
|
package ai.libs.jaicore.ml.ranking.label.learner;
/**
 * Marker interface for label rankers that can be configured; currently declares
 * no methods.
 */
public interface IConfigurableLabelRanker {
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/label/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/label/learner/clusterbased/IGroupBasedRanker.java
|
package ai.libs.jaicore.ml.ranking.label.learner.clusterbased;
import org.api4.java.ai.ml.ranking.dataset.IRankingDataset;
import org.api4.java.ai.ml.ranking.dataset.IRankingInstance;
import org.api4.java.ai.ml.ranking.learner.IRanker;
import ai.libs.jaicore.ml.ranking.label.learner.clusterbased.customdatatypes.RankingForGroup;
/**
 * A ranker that associates rankings with groups (clusters) of problem instances
 * and returns, for a query instance, the ranking of the group it belongs to.
 *
 * @author Helen
 *
 * @param <O> the solutions that get ranked
 * @param <I> the ranking instances that get grouped
 * @param <D> the dataset type containing the ranking instances
 * @param <Z> the type of the group centers that identify groups
 */
public interface IGroupBasedRanker<O, I extends IRankingInstance<O>, D extends IRankingDataset<O, I>, Z> extends IRanker<O, I, D> {

	/**
	 * @param prob the instance for which a ranking is requested
	 * @return the ranking associated with the group of the given instance
	 */
	public RankingForGroup<Z, O> getRanking(I prob);
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/label/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/label/learner/clusterbased/IGroupBuilder.java
|
package ai.libs.jaicore.ml.ranking.label.learner.clusterbased;
import java.util.List;
import ai.libs.jaicore.ml.ranking.label.learner.clusterbased.customdatatypes.Group;
import ai.libs.jaicore.ml.ranking.label.learner.clusterbased.customdatatypes.ProblemInstance;
/**
 * IGroupBuilder describes the act of building groups out of problem instances.
 *
 * @author Helen Beierling
 *
 * @param <C>
 *            centers of the found groups
 * @param <I>
 *            problem instances to group and grouped instances
 */
public interface IGroupBuilder<C, I> {

	/**
	 * @param allInstances all problem instances to partition into groups
	 * @return the groups built from the given instances
	 */
	List<Group<C, I>> buildGroup(List<ProblemInstance<I>> allInstances);
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/label/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/label/learner/clusterbased/IGroupSolutionRankingSelect.java
|
package ai.libs.jaicore.ml.ranking.label.learner.clusterbased;
import ai.libs.jaicore.ml.ranking.label.learner.clusterbased.customdatatypes.Group;
import ai.libs.jaicore.ml.ranking.label.learner.clusterbased.customdatatypes.RankingForGroup;
import ai.libs.jaicore.ml.ranking.label.learner.clusterbased.customdatatypes.Table;
/**
 * Selects a ranking of solutions for a group of problem instances based on
 * previously collected performance information.
 *
 * @author Helen Beierling
 *
 * @param <C> the identifier of the considered group
 * @param <S> the solutions that are in the group and are getting ranked over all problem instances in the group
 * @param <I> the instances in the group
 * @param <P> the performances of the solutions for a problem instance
 */
public interface IGroupSolutionRankingSelect<C,S,I,P> {

	/**
	 * @param consideredGroup the group for which a ranking is to be chosen
	 * @param collectInformation the information that was collected for the problem instances in
	 *            the group from previous tests
	 * @return a ranking of solutions that performs well for the problem instances in the group
	 */
	RankingForGroup<C,S> selectGroupsolutionRanking(Group<C,I> consideredGroup,Table<I,S,P>collectInformation);
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/label/learner/clusterbased
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/label/learner/clusterbased/candidateprovider/IRankedSolutionCandidateProvider.java
|
package ai.libs.jaicore.ml.ranking.label.learner.clusterbased.candidateprovider;
import ai.libs.jaicore.ml.ranking.label.learner.clusterbased.customdatatypes.ProblemInstance;
import ai.libs.jaicore.ml.ranking.label.learner.clusterbased.customdatatypes.Ranking;
/**
 * Provides a ranking of solution candidates for a given problem instance.
 *
 * @param <I> the type of the problem instance
 * @param <S> the type of the ranked solution candidates
 */
public interface IRankedSolutionCandidateProvider<I,S> {

	/**
	 * @param instance the problem instance to obtain candidates for
	 * @return the ranked solution candidates for the given instance
	 */
	Ranking<S> getCandidate(ProblemInstance<I> instance);
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/label/learner/clusterbased
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/label/learner/clusterbased/customdatatypes/Group.java
|
package ai.libs.jaicore.ml.ranking.label.learner.clusterbased.customdatatypes;
import java.util.List;
/**
* Group.java - Stores a group with it center as ID and the associated instances
*
* @author Helen Bierling
*
* @param <C>
* The identifier of the group
* @param <I>
* The instances in the group
*/
/**
 * A group (cluster) of problem instances together with the identifier that
 * serves as the group's ID (e.g. its center).
 *
 * @param <C> the type identifying the group
 * @param <I> the type of the grouped instances
 */
public class Group<C, I> {

	private GroupIdentifier<C> groupIdentifier;
	private List<ProblemInstance<I>> problemInstances;

	/**
	 * @param instanlist the instances forming the group
	 * @param id the identifier of the group
	 */
	public Group(final List<ProblemInstance<I>> instanlist, final GroupIdentifier<C> id) {
		this.groupIdentifier = id;
		this.problemInstances = instanlist;
	}

	/** @return the instances currently contained in this group */
	public List<ProblemInstance<I>> getInstances() {
		return this.problemInstances;
	}

	/** Replaces the instances of this group. */
	public void setInstances(final List<ProblemInstance<I>> newInstances) {
		this.problemInstances = newInstances;
	}

	/** Replaces the identifier of this group. */
	public void setGroupIdentifier(final GroupIdentifier<C> newIdentifer) {
		this.groupIdentifier = newIdentifer;
	}

	/** Appends a single instance to this group. */
	public void addInstance(final ProblemInstance<I> newInstance) {
		this.problemInstances.add(newInstance);
	}

	/** @return the identifier of this group */
	public GroupIdentifier<C> getId() {
		return this.groupIdentifier;
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/label/learner/clusterbased
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/label/learner/clusterbased/customdatatypes/GroupIdentifier.java
|
package ai.libs.jaicore.ml.ranking.label.learner.clusterbased.customdatatypes;
/**
* @author Helen Beierling
*
* @param <C> An identifier of a group
*/
/**
 * Immutable identifier (e.g. cluster center) of a group of problem instances.
 *
 * @author Helen Beierling
 *
 * @param <C> the type of the object identifying a group
 */
public class GroupIdentifier<C> {

	// Made final: the identifier is set once in the constructor and never reassigned,
	// so the class is now immutable.
	private final C identifier;

	/**
	 * @param id the object identifying the group
	 */
	public GroupIdentifier(final C id) {
		this.identifier = id;
	}

	/**
	 * @return the object identifying the group
	 */
	public C getIdentifier() {
		return this.identifier;
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/label/learner/clusterbased
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/label/learner/clusterbased/customdatatypes/ProblemInstance.java
|
package ai.libs.jaicore.ml.ranking.label.learner.clusterbased.customdatatypes;
/**
 * Wraps a single observed problem instance, which may be absent (null).
 *
 * @author Helen Beierling
 *
 * @param <I> the type of the observed instance
 */
public class ProblemInstance<I> {

	/** The wrapped instance; null when no instance has been set. */
	private I instance;

	/** Creates an empty problem instance (no wrapped instance). */
	public ProblemInstance() {}

	/**
	 * @param inst the instance to wrap
	 */
	public ProblemInstance(final I inst) {
		this.instance = inst;
	}

	/**
	 * @return the wrapped instance, or null if none has been set
	 */
	public I getInstance() {
		return this.instance;
	}

	/**
	 * @param newinstance the instance to wrap from now on
	 */
	public void setInstance(final I newinstance) {
		this.instance = newinstance;
	}

	/**
	 * @return true iff no instance is currently set
	 */
	public boolean isEmpty() {
		// Bug fix: previously returned (instance != null), i.e. the inverse of "empty".
		return this.instance == null;
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/label/learner/clusterbased
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/label/learner/clusterbased/customdatatypes/Ranking.java
|
package ai.libs.jaicore.ml.ranking.label.learner.clusterbased.customdatatypes;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;
import org.api4.java.ai.ml.ranking.IRanking;
/**
 * A ranking of objects, represented as an {@link ArrayList} in rank order.
 * All probability-related operations of {@link IRanking} are unsupported,
 * since plain rankings carry no probability information.
 *
 * @param <O> the type of the ranked objects
 */
public class Ranking<O> extends ArrayList<O> implements IRanking<O> {

	private static final long serialVersionUID = 6925500382758165610L;

	/** Shared message for the unsupported probability-related operations. */
	private static final String MSG_NO_PROBABILITIES = "Ranking predictions are not equipped with probabilities by default.";

	/** Creates a ranking containing the given items in their iteration order. */
	public Ranking(final Collection<O> items) {
		super(items);
	}

	/** Creates an empty ranking. */
	public Ranking() {
		super();
	}

	/** @return this ranking itself; a ranking is its own prediction */
	@Override
	public Ranking<O> getPrediction() {
		return this;
	}

	/** @throws UnsupportedOperationException always */
	@Override
	public Object getLabelWithHighestProbability() {
		throw new UnsupportedOperationException(MSG_NO_PROBABILITIES);
	}

	/** @throws UnsupportedOperationException always */
	@Override
	public Map<?, Double> getClassDistribution() {
		throw new UnsupportedOperationException(MSG_NO_PROBABILITIES);
	}

	/** @throws UnsupportedOperationException always */
	@Override
	public Map<?, Double> getClassConfidence() {
		throw new UnsupportedOperationException(MSG_NO_PROBABILITIES);
	}

	/** @throws UnsupportedOperationException always */
	@Override
	public double getProbabilityOfLabel(final Object label) {
		throw new UnsupportedOperationException(MSG_NO_PROBABILITIES);
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/label/learner/clusterbased
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/label/learner/clusterbased/customdatatypes/RankingForGroup.java
|
package ai.libs.jaicore.ml.ranking.label.learner.clusterbased.customdatatypes;
import java.util.List;
/**
* RankingForGroup.java - saves a solution ranking for a group identified by thier group
*
* @author Helen Beierling
*
* @param <C> The identifier of the group
* @param <O> The solutions that are ranked best for a group of probleminstances
*/
public class RankingForGroup<C, O> extends Ranking<O> {

	/** Serial version UID of this serializable ranking. */
	private static final long serialVersionUID = -8923800257075362730L;

	// Transient: the identifier is excluded from serialization (GroupIdentifier does not
	// declare Serializable), so it is null after deserialization.
	private transient GroupIdentifier<C> identifierOfGroup;

	/**
	 * @param identifier the identifier of the group this ranking belongs to
	 * @param solutionsForGroup the ranked solutions for that group, best first
	 */
	public RankingForGroup(final GroupIdentifier<C> identifier, final List<O> solutionsForGroup) {
		super(solutionsForGroup);
		this.identifierOfGroup = identifier;
	}

	/** @return the identifier of the group this ranking belongs to */
	public GroupIdentifier<C> getIdentifierForGroup() {
		return this.identifierOfGroup;
	}

	@Override
	public int hashCode() {
		// Combine the list-based hash of the ranking with the hash of the group identifier.
		final int prime = 31;
		int result = super.hashCode();
		result = prime * result + ((this.identifierOfGroup == null) ? 0 : this.identifierOfGroup.hashCode());
		return result;
	}

	@Override
	public boolean equals(final Object obj) {
		if (this == obj) {
			return true;
		}
		// Element-wise list comparison first (AbstractList.equals), then exact-class check.
		if (!super.equals(obj)) {
			return false;
		}
		if (this.getClass() != obj.getClass()) {
			return false;
		}
		RankingForGroup<?, ?> other = (RankingForGroup<?, ?>) obj;
		if (this.identifierOfGroup == null) {
			if (other.identifierOfGroup != null) {
				return false;
			}
		} else if (!this.identifierOfGroup.equals(other.identifierOfGroup)) {
			return false;
		}
		return true;
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/label/learner/clusterbased
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/label/learner/clusterbased/customdatatypes/Table.java
|
package ai.libs.jaicore.ml.ranking.label.learner.clusterbased.customdatatypes;
import java.util.ArrayList;
import java.util.HashMap;
import ai.libs.jaicore.basic.sets.Pair;
/**
* Table.java - This class is used to store probleminstance and their according solutions and
* performances for that solution.
*
* @author Helen Beierling
*
* @param <I> ProblemInstances saved in the table
* @param <S> Solutions for a probleminstance
* @param <P> Performance of a solution for a probleminstance
*/
public class Table<I, S, P> {

	// Maps each problem instance to the (solution, performance) pairs recorded for it.
	private HashMap<ProblemInstance<I>, ArrayList<Pair<S, P>>> informationForRanking;

	/** Creates an empty table. */
	public Table() {
		this.informationForRanking = new HashMap<>();
	}

	/**
	 * Gets the Solutions for a given probleminstance.
	 *
	 * NOTE(review): if the instance is not in the table, the map lookup returns null and the
	 * loop below throws a NullPointerException -- confirm callers guarantee presence.
	 *
	 * @param consideredProblemInstance The consideredProblemInstance
	 * @return Gives an ArrayList with all solutions for a probleminstance
	 */
	ArrayList<S> getSolutionforProblemInstanceTable(final ProblemInstance<I> consideredProblemInstance) {
		ArrayList<Pair<S, P>> listOfInformationForProblemInst = this.informationForRanking.get(consideredProblemInstance);
		ArrayList<S> solutionsForInstance = new ArrayList<>();
		for (Pair<S, P> i : listOfInformationForProblemInst) {
			solutionsForInstance.add(i.getX());
		}
		return solutionsForInstance;
	}

	/**
	 * Gets the Performance for a given probleminstance.
	 *
	 * NOTE(review): same NullPointerException hazard for unknown instances as above.
	 *
	 * @param consideredProblemInstance the considered problemInstance
	 * @return Gives an ArrayList with all performances for a probleminstance
	 */
	ArrayList<P> getPerformanceforProblemInstanceTable(final ProblemInstance<I> consideredProblemInstance) {
		ArrayList<Pair<S, P>> listOfInformationForProblemInst = this.informationForRanking.get(consideredProblemInstance);
		ArrayList<P> performanceForInstance = new ArrayList<>();
		for (Pair<S, P> i : listOfInformationForProblemInst) {
			performanceForInstance.add(i.getY());
		}
		return performanceForInstance;
	}

	/**
	 * Gets the list of all Solutions and the performance values with that
	 * for a given probleminstance. Returns the internal (mutable) list, or null if
	 * the instance is unknown.
	 *
	 * @param consideredProblemInstance the considered problemInstance
	 * @return Gives an ArrayList of tuple consisting of the solution and its performance
	 */
	ArrayList<Pair<S, P>> getInfromationforInstance(final ProblemInstance<I> consideredProblemInstance) {
		return this.informationForRanking.get(consideredProblemInstance);
	}

	/**
	 * Gets all information for all saved probleminstances. Note that the internal
	 * (mutable) map is returned directly.
	 *
	 * @return The hashmap with the probleminstances as keys and their solutions and the according
	 *         performances as values.
	 */
	HashMap<ProblemInstance<I>, ArrayList<Pair<S, P>>> getInformationForRanking() {
		return this.informationForRanking;
	}

	/**
	 * Adds a new probleminstace to the table as well as the according solutions and performances.
	 * An existing entry for the same instance is replaced.
	 *
	 * @param newProblemInstanceForTab the new Instance
	 * @param informationForInstance adds a new key,value pair to the Hashmap Table
	 */
	void addProblemInstanceToTable(final ProblemInstance<I> newProblemInstanceForTab, final ArrayList<Pair<S, P>> informationForInstance) {
		this.informationForRanking.put(newProblemInstanceForTab, informationForInstance);
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/label/learner/clusterbased
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/label/learner/clusterbased/datamanager/IInstanceCollector.java
|
package ai.libs.jaicore.ml.ranking.label.learner.clusterbased.datamanager;
import java.util.List;
import ai.libs.jaicore.ml.ranking.label.learner.clusterbased.customdatatypes.ProblemInstance;
public interface IInstanceCollector <I>{
List<ProblemInstance<I>> getProblemInstances();
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/label/learner/clusterbased
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/label/learner/clusterbased/datamanager/ITableGeneratorandCompleter.java
|
package ai.libs.jaicore.ml.ranking.label.learner.clusterbased.datamanager;
import java.util.List;
import ai.libs.jaicore.ml.ranking.label.learner.clusterbased.customdatatypes.ProblemInstance;
import ai.libs.jaicore.ml.ranking.label.learner.clusterbased.customdatatypes.Table;
public interface ITableGeneratorandCompleter<I, S, P> {
public Table<I, S, P> getInforamtionforRanking(List<ProblemInstance<I>> instancesToRank);
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/loss/ARankingPredictionPerformanceMeasure.java
|
package ai.libs.jaicore.ml.ranking.loss;
import java.util.List;
import java.util.OptionalDouble;
import java.util.stream.IntStream;
import org.api4.java.ai.ml.ranking.IRanking;
import org.api4.java.ai.ml.ranking.loss.IRankingPredictionPerformanceMeasure;
import ai.libs.jaicore.ml.classification.loss.dataset.APredictionPerformanceMeasure;
/**
 * Base class for ranking performance measures: aggregates a pairwise ranking
 * loss over a list of (expected, actual) ranking pairs by averaging.
 */
public abstract class ARankingPredictionPerformanceMeasure extends APredictionPerformanceMeasure<IRanking<?>, IRanking<?>> implements IRankingPredictionPerformanceMeasure {

	/**
	 * Computes the mean pairwise loss over all ranking pairs.
	 *
	 * @param expected the ground-truth rankings
	 * @param actual the predicted rankings, index-aligned with {@code expected}
	 * @return the average of {@link #loss(IRanking, IRanking)} over all pairs
	 * @throws IllegalArgumentException if the two lists differ in size
	 * @throws IllegalStateException if no average could be computed (empty input)
	 */
	@Override
	public double loss(final List<? extends IRanking<?>> expected, final List<? extends IRanking<?>> actual) {
		if (expected.size() != actual.size()) {
			throw new IllegalArgumentException("Expected and actual rankings must have the same size.");
		}
		// Bug fix: previously indexed get(0) for every x, so only the first pair was ever compared.
		OptionalDouble res = IntStream.range(0, expected.size()).mapToDouble(x -> this.loss(expected.get(x), actual.get(x))).average();
		if (res.isPresent()) {
			return res.getAsDouble();
		}
		throw new IllegalStateException("Could not aggregate " + this.getClass().getSimpleName());
	}

	/**
	 * Computes the loss of a single predicted ranking against the expected one.
	 *
	 * @param expected the ground-truth ranking
	 * @param actual the predicted ranking
	 * @return the loss value
	 */
	public abstract double loss(final IRanking<?> expected, IRanking<?> actual);
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/loss/KendallsTauDyadRankingLoss.java
|
package ai.libs.jaicore.ml.ranking.loss;
import org.api4.java.ai.ml.ranking.IRanking;
import org.api4.java.ai.ml.ranking.loss.IRankingPredictionPerformanceMeasure;
/**
* Computes the rank correlation measure known as Kendall's tau coefficient, i.e.
* (C - D) / (K * (K-1) /2), where C and D are the number of concordant (put in the right order)
* and discordant (put in the wrong order) pairs of dyads and K is the length of the dyad ranking.
* Lies between -1 (reversed order) and +1 (same order).
* Assumes the dyads in the ranking to be pairwise distinct.
*
* @author mbraun
* @author mwever
*
*/
public class KendallsTauDyadRankingLoss extends ARankingPredictionPerformanceMeasure implements IRankingPredictionPerformanceMeasure {

	/**
	 * Computes Kendall's tau between the expected and predicted ranking.
	 *
	 * NOTE(review): despite the method name "loss", the returned value is the tau
	 * coefficient itself (+1 = identical order, -1 = reversed) -- confirm callers
	 * transform it if a minimizable loss is expected.
	 *
	 * @param expected the ground-truth ranking
	 * @param predicted the predicted ranking (same elements, possibly different order)
	 * @return Kendall's tau in [-1, 1]
	 * @throws IllegalArgumentException if the ranking has fewer than 2 elements
	 */
	@Override
	public double loss(final IRanking<?> expected, final IRanking<?> predicted) {
		int dyadRankingLength = expected.size();
		if (dyadRankingLength <= 1) {
			throw new IllegalArgumentException("Dyad rankings must have length greater than 1.");
		}
		int nConc = 0;
		int nDisc = 0;
		for (int predIndex = 0; predIndex < dyadRankingLength - 1; predIndex++) {
			Object predDyad = predicted.get(predIndex);
			// Locate the current predicted dyad in the expected ranking.
			// NOTE(review): actualIndex stays -1 if the dyad is absent from expected -- confirm
			// rankings are always permutations of each other.
			int actualIndex = -1;
			for (int i = 0; i < dyadRankingLength; i++) {
				if (expected.get(i).equals(predDyad)) {
					actualIndex = i;
					break;
				}
			}
			// Classify every pair (predIndex, i) as concordant or discordant.
			for (int i = predIndex + 1; i < dyadRankingLength; i++) {
				if (this.isRankingCorrectForIndex(expected, predicted, dyadRankingLength, actualIndex, i)) {
					nConc++;
				} else {
					nDisc++;
				}
			}
		}
		// (C - D) / (K * (K - 1) / 2)
		return 2.0 * (nConc - nDisc) / (dyadRankingLength * (dyadRankingLength - 1));
	}

	/**
	 * @return true iff the dyad at predicted position {@code i} appears after position
	 *         {@code actualIndex} in the expected ranking, i.e. the pair is concordant
	 */
	private boolean isRankingCorrectForIndex(final IRanking<?> actual, final IRanking<?> predicted, final int dyadRankingLength, final int actualIndex, final int i) {
		Object predPairedDyad = predicted.get(i);
		boolean found = false;
		for (int j = actualIndex + 1; j < dyadRankingLength && !found; j++) {
			if (actual.get(j).equals(predPairedDyad)) {
				found = true;
			}
		}
		return found;
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/loss/KendallsTauOfTopK.java
|
package ai.libs.jaicore.ml.ranking.loss;
import java.util.List;
import java.util.OptionalDouble;
import java.util.stream.IntStream;
import org.api4.java.ai.ml.ranking.IRanking;
import org.api4.java.ai.ml.ranking.loss.IRankingPredictionPerformanceMeasure;
/**
* Calculates the kendalls-tau loss only for the top k dyads.
*
* https://researcher.watson.ibm.com/researcher/files/us-fagin/topk.pdf
*
* @author mirkoj
* @author mwever
*
*/
public class KendallsTauOfTopK extends ARankingPredictionPerformanceMeasure implements IRankingPredictionPerformanceMeasure {
// Number of top ranking positions the loss is restricted to.
private int k;
// Penalty parameter of the top-k distance.
// NOTE(review): p is stored but not referenced in the code visible here -- confirm it is
// applied in the case-penalty helpers.
private double p;

/**
 * @param k the number of top positions to consider
 * @param p the penalty parameter of the top-k distance
 */
public KendallsTauOfTopK(final int k, final double p) {
	this.k = k;
	this.p = p;
}
/**
 * Computes the mean top-k Kendall distance over all (expected, actual) ranking pairs.
 *
 * @param expected the ground-truth rankings
 * @param actual the predicted rankings, index-aligned with {@code expected}
 * @return the average pairwise loss
 * @throws IllegalStateException if no average could be computed (empty input)
 */
@Override
public double loss(final List<? extends IRanking<?>> expected, final List<? extends IRanking<?>> actual) {
	// Bug fix: previously indexed get(0) for every x, so only the first pair was ever compared.
	OptionalDouble res = IntStream.range(0, expected.size()).mapToDouble(x -> this.loss(expected.get(x), actual.get(x))).average();
	if (res.isPresent()) {
		return res.getAsDouble();
	}
	throw new IllegalStateException("Could not aggregate kendalls tau of top k");
}
/**
 * Computes the top-k Kendall distance between two rankings by summing, over all
 * element pairs, the penalty of the case (1-4) the pair falls into.
 *
 * @param actual the ground-truth ranking
 * @param predicted the predicted ranking (same elements, possibly different order)
 * @return the accumulated penalty (0 = identical top-k order)
 * @throws IllegalArgumentException if k is not greater than 1
 */
@Override
public double loss(final IRanking<?> actual, final IRanking<?> predicted) {
	if (this.k <= 1) {
		throw new IllegalArgumentException("Rankings must have length greater than 1.");
	}
	double kendallsDistance = 0;
	for (int actualI = 0; actualI < actual.size() - 1; actualI++) {
		Object actualDyad = actual.get(actualI);
		// Position of the current element in the predicted ranking.
		// NOTE(review): predictedI stays -1 if the element is absent from predicted --
		// confirm both rankings always contain the same elements.
		int predictedI = -1;
		for (int i = 0; i < predicted.size(); i++) {
			if (predicted.get(i).equals(actualDyad)) {
				predictedI = i;
				break;
			}
		}
		for (int actualJ = actualI + 1; actualJ < actual.size(); actualJ++) {
			Object actPairedDyad = actual.get(actualJ);
			// Position of the paired element in the predicted ranking (same -1 caveat).
			int predictedJ = -1;
			for (int j = 0; j < predicted.size(); j++) {
				if (predicted.get(j).equals(actPairedDyad)) {
					predictedJ = j;
					break;
				}
			}
			double penalty = 0;
			boolean iAndJAreBothInPredictedTopK = predictedI < this.k && predictedJ < this.k;
			boolean iAndJAreBothInActualTopK = actualI < this.k && actualJ < this.k;
			// case 1: i,j are both in the top k list of the predicted and actual ranking
			penalty = this.checkCase1(actualI, predictedI, actualJ, predictedJ, penalty, iAndJAreBothInPredictedTopK, iAndJAreBothInActualTopK);
			boolean justIIsInPredictedTopK = predictedI < this.k && predictedJ >= this.k;
			boolean justJIsInPredictedTopK = predictedJ < this.k && predictedI >= this.k;
			boolean justIIsInActualTopK = actualI < this.k && actualJ >= this.k;
			boolean justJIsInActualTopK = actualJ < this.k && actualI >= this.k;
			// case 2: i,j are both in one top k ranking but for the other ranking just one
			// is in the top k
			penalty = this.checkCase2(actualI, predictedI, actualJ, predictedJ, penalty, iAndJAreBothInPredictedTopK, iAndJAreBothInActualTopK, justIIsInPredictedTopK, justJIsInPredictedTopK, justIIsInActualTopK, justJIsInActualTopK);
			// case 3: i, but not j, appears in one top k list , and j, but not i, appears
			// in the other top k list
			penalty = this.checkCase3(penalty, justIIsInPredictedTopK, justJIsInPredictedTopK, justIIsInActualTopK, justJIsInActualTopK);
			// case 4:
			penalty = this.checkCase4(actualI, predictedI, actualJ, predictedJ, penalty, iAndJAreBothInPredictedTopK, iAndJAreBothInActualTopK);
			kendallsDistance += penalty;
		}
	}
	return kendallsDistance;
}
/**
 * Case 1: both elements occur in the top k of the actual AND the predicted ranking.
 * Penalty 0 if their relative order agrees in both rankings, 1 if it is inverted.
 * If the case does not apply, the incoming penalty is returned unchanged.
 */
private double checkCase1(final int actualI, final int predictedI, final int actualJ, final int predictedJ, double penalty, final boolean iAndJAreBothInPredictedTopK, final boolean iAndJAreBothInActualTopK) {
	if (iAndJAreBothInActualTopK && iAndJAreBothInPredictedTopK) {
		// case 1.1: if they are ranked the same in both topk lists: 0 penalty
		boolean iIsBetterThanJInPredictedAndActualRanking = predictedI < predictedJ && actualI < actualJ;
		boolean jIsBetterThanIInPredictedAndActualRanking = predictedI > predictedJ && actualI > actualJ;
		if (iIsBetterThanJInPredictedAndActualRanking || jIsBetterThanIInPredictedAndActualRanking) {
			penalty = 0;
		}
		// case 1.2 ranking mismatch in one of them
		boolean iIsBetterThanJInPredictedButNotInActualRanking = predictedI < predictedJ && actualI > actualJ;
		boolean jIsBetterThanIInPredictedButNotInActualRanking = predictedI > predictedJ && actualI < actualJ;
		if (iIsBetterThanJInPredictedButNotInActualRanking || jIsBetterThanIInPredictedButNotInActualRanking) {
			penalty = 1;
		}
	}
	return penalty;
}
/**
 * Case 2: both elements are in the top k of one ranking, but only one of them is in
 * the top k of the other. Penalty 0 if the element that made it into the other top k
 * is the better-ranked one of the pair, else 1. If the case does not apply, the
 * incoming penalty is returned unchanged.
 *
 * BUG FIX: the condition for "both in the predicted top k but just one in the actual
 * top k" previously tested {@code justJIsInPredictedTopK}, which contradicts
 * {@code iAndJAreBothInPredictedTopK} and was therefore always false; it now tests
 * {@code justJIsInActualTopK}, mirroring the symmetric condition below.
 */
private double checkCase2(final int actualI, final int predictedI, final int actualJ, final int predictedJ, double penalty, final boolean iAndJAreBothInPredictedTopK, final boolean iAndJAreBothInActualTopK,
		final boolean justIIsInPredictedTopK, final boolean justJIsInPredictedTopK, final boolean justIIsInActualTopK, final boolean justJIsInActualTopK) {
	boolean bothPredictedAreInTopKButJustOneActual = (iAndJAreBothInPredictedTopK && justIIsInActualTopK) || (iAndJAreBothInPredictedTopK && justJIsInActualTopK);
	boolean bothActualAreInTopKButJustOnePredicted = (iAndJAreBothInActualTopK && justIIsInPredictedTopK) || (iAndJAreBothInActualTopK && justJIsInPredictedTopK);
	if (bothActualAreInTopKButJustOnePredicted) {
		if (actualI < actualJ) {
			// we know that actualI < actualJ < k;
			// if just i is in the predicted top k then we know that predictedI < predictedJ
			if (justIIsInPredictedTopK) {
				penalty = 0;
			} else {
				// predictedJ > predictedI
				penalty = 1;
			}
		} else {
			// actualJ < actualI
			// if just j is in the predicted top k then predictedJ < predictedI
			if (justJIsInPredictedTopK) {
				penalty = 0;
			} else {
				penalty = 1;
			}
		}
	}
	if (bothPredictedAreInTopKButJustOneActual) {
		if (predictedI < predictedJ) {
			// again, we know that predictedI < predictedJ < k
			// likewise, if the i of the actual ranking is in top k we are fine
			if (justIIsInActualTopK) {
				penalty = 0;
			} else {
				penalty = 1;
			}
		} else {
			// predictedJ < predictedI < k
			if (justJIsInActualTopK) {
				penalty = 0;
			} else {
				penalty = 1;
			}
		}
	}
	return penalty;
}
/**
 * Case 3: i, but not j, appears in one top-k list while j, but not i, appears in the
 * other — a guaranteed disagreement, penalized with 1. If the case does not apply,
 * the incoming penalty is returned unchanged.
 */
private double checkCase3(double penalty, final boolean justIIsInPredictedTopK, final boolean justJIsInPredictedTopK, final boolean justIIsInActualTopK, final boolean justJIsInActualTopK) {
	boolean crossMismatch = (justIIsInActualTopK && justJIsInPredictedTopK) || (justJIsInActualTopK && justIIsInPredictedTopK);
	return crossMismatch ? 1 : penalty;
}
/**
 * Case 4: the pair occurs completely in the top k of one ranking while neither element
 * occurs in the top k of the other; this is penalized with the configured constant p.
 * If the case does not apply, the incoming penalty is returned unchanged.
 */
private double checkCase4(final int actualI, final int predictedI, final int actualJ, final int predictedJ, double penalty, final boolean iAndJAreBothInPredictedTopK, final boolean iAndJAreBothInActualTopK) {
	boolean neitherIOrJAreInPredictedTopK = predictedI >= this.k && predictedJ >= this.k;
	boolean neitherIOrJAreInActualTopK = actualI >= this.k && actualJ >= this.k;
	if (iAndJAreBothInActualTopK && neitherIOrJAreInPredictedTopK) {
		penalty = this.p;
	}
	if (iAndJAreBothInPredictedTopK && neitherIOrJAreInActualTopK) {
		penalty = this.p;
	}
	return penalty;
}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/loss/NDCGLoss.java
|
package ai.libs.jaicore.ml.ranking.loss;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.OptionalDouble;
import java.util.stream.IntStream;
import org.api4.java.ai.ml.ranking.IRanking;
import org.api4.java.ai.ml.ranking.loss.IRankingPredictionPerformanceMeasure;
/**
* The Normalized Discounted Cumulative Gain for ranking.
*
* @author mbraun
* @author mwever
*
*/
public class NDCGLoss extends ARankingPredictionPerformanceMeasure implements IRankingPredictionPerformanceMeasure {

	/** The position up to which to compute the cumulative gain (zero-indexed, exclusive). */
	private int l;

	// If true, every item in the expected top-l gets relevance 1; the graded branch
	// (relevance -(i+1)) is currently dead code because this constant is final.
	private static final boolean BINARY_RELEVANCE = true;

	/**
	 * @param l The position up to which to compute the cumulative gain (zero-indexed, exclusive).
	 */
	public NDCGLoss(final int l) {
		super();
		this.setL(l);
	}

	/**
	 * Averages the single-ranking loss over all index-aligned pairs of rankings.
	 *
	 * @throws IllegalStateException if the average cannot be computed (empty input)
	 */
	@Override
	public double loss(final List<? extends IRanking<?>> expected, final List<? extends IRanking<?>> actual) {
		// BUG FIX: previously every index mapped to loss(expected.get(0), actual.get(0)),
		// so only the first pair contributed to the average; use the running index x.
		OptionalDouble res = IntStream.range(0, expected.size()).mapToDouble(x -> this.loss(expected.get(x), actual.get(x))).average();
		if (res.isPresent()) {
			return res.getAsDouble();
		}
		// BUG FIX: the message previously referred to "kendalls tau of top k" (copy/paste).
		throw new IllegalStateException("Could not aggregate NDCG loss");
	}

	/**
	 * Computes the (inverse) normalized discounted cumulative gain between two rankings.
	 * Relevance values are derived from the top-l entries of the expected ranking.
	 *
	 * @param expected the ground-truth ranking (defines the ideal order)
	 * @param actual the predicted ranking
	 * @throws IllegalArgumentException if the rankings are too short or of unequal length
	 */
	@Override
	public double loss(final IRanking<?> expected, final IRanking<?> actual) {
		if (expected.size() <= 1) {
			throw new IllegalArgumentException("Dyad rankings must have length greater than 1.");
		}
		if (expected.size() != actual.size()) {
			throw new IllegalArgumentException("Dyad rankings must have equal length.");
		}
		// relevance assignment from the expected ranking's top-l entries
		Map<Object, Integer> relevance = new HashMap<>();
		for (int i = 0; i < this.l; i++) {
			if (BINARY_RELEVANCE) {
				relevance.put(expected.get(i), 1);
			} else {
				relevance.put(expected.get(i), -(i + 1));
			}
		}
		double dcg = this.computeDCG(actual, relevance);
		double idcg = this.computeDCG(expected, relevance); // ideal DCG: expected ranking is the ideal order
		// NOTE(review): standard NDCG is dcg / idcg; this returns the inverse idcg / dcg
		// (>= 1 for imperfect rankings), guarded against a zero dcg. Kept as-is to preserve
		// existing behavior — confirm this inversion is intentional for use as a loss.
		if (dcg != 0) {
			return idcg / dcg;
		}
		return 0;
	}

	/**
	 * Discounted cumulative gain of the ranking under the given relevance assignment;
	 * items without a relevance entry contribute relevance 0 (and are cached as such).
	 */
	private double computeDCG(final IRanking<?> ranking, final Map<Object, Integer> relevance) {
		int length = ranking.size();
		double dcg = 0;
		for (int i = 0; i < length; i++) {
			// gain 2^relevance discounted by log2(position + 2) (1-based position + 1)
			dcg += Math.pow(2, relevance.computeIfAbsent(ranking.get(i), t -> 0)) / this.log2(i + 2.0);
		}
		return dcg;
	}

	/** Binary logarithm. */
	private double log2(final double x) {
		return Math.log(x) / Math.log(2);
	}

	public int getL() {
		return this.l;
	}

	public void setL(final int l) {
		this.l = l;
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/loss/TopKOfPredicted.java
|
package ai.libs.jaicore.ml.ranking.loss;
import java.util.ArrayList;
import java.util.List;
import org.api4.java.ai.ml.ranking.IRanking;
import org.api4.java.ai.ml.ranking.loss.IRankingPredictionPerformanceMeasure;
/**
* Calculates if the top-k dyads of the predicted ranking match the top-k dyads
* of the actual ranking. This ignores the rankings.
*
* @author mirkoj
* @author mwever
*
*/
public class TopKOfPredicted extends ARankingPredictionPerformanceMeasure implements IRankingPredictionPerformanceMeasure {

	/** Number of top-ranked entries to compare. */
	private int k;

	/**
	 * Specifies the amount of top rankings to consider.
	 *
	 * @param k
	 */
	public TopKOfPredicted(final int k) {
		this.k = k;
	}

	/**
	 * Fraction of the predicted top-k entries that do not occur among the actual top-k
	 * entries; the order within the top k is ignored.
	 */
	@Override
	public double loss(final IRanking<?> actual, final IRanking<?> predicted) {
		// collect the entries ranked in the top k of the actual ranking
		List<Object> actualTopK = new ArrayList<>();
		for (int rank = 0; rank < this.k; rank++) {
			actualTopK.add(actual.get(rank));
		}
		// count predicted top-k entries that are missing from the actual top k
		int misses = 0;
		for (int rank = 0; rank < this.k; rank++) {
			if (!actualTopK.contains(predicted.get(rank))) {
				misses++;
			}
		}
		return (double) misses / (double) this.k;
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/loss/package-info.java
|
package ai.libs.jaicore.ml.ranking.loss;
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/ranking/object/package-info.java
|
/**
* Object-Ranking package.
*
* @author mwever
*/
package ai.libs.jaicore.ml.ranking.object;
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/package-info.java
|
package ai.libs.jaicore.ml.regression;
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/learner/ConstantRegressor.java
|
package ai.libs.jaicore.ml.regression.learner;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance;
import org.api4.java.ai.ml.core.evaluation.IPrediction;
import org.api4.java.ai.ml.core.evaluation.IPredictionBatch;
import org.api4.java.ai.ml.core.exception.PredictionException;
import org.api4.java.ai.ml.core.exception.TrainingException;
import org.api4.java.ai.ml.regression.evaluation.IRegressionPrediction;
import org.api4.java.ai.ml.regression.evaluation.IRegressionResultBatch;
import ai.libs.jaicore.ml.core.learner.ASupervisedLearner;
import ai.libs.jaicore.ml.regression.singlelabel.SingleTargetRegressionPrediction;
import ai.libs.jaicore.ml.regression.singlelabel.SingleTargetRegressionPredictionBatch;
public class ConstantRegressor extends ASupervisedLearner<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>, IPrediction, IPredictionBatch> {

	// Mean of the non-null training labels; null until fit(...) has been called.
	private Double constantValue;

	public ConstantRegressor() {
		// nothing to initialize; the constant is learned in fit(...)
	}

	/**
	 * Fits the regressor by averaging all non-null labels of the training data.
	 *
	 * @throws IllegalArgumentException if the dataset is empty or contains no non-null label
	 */
	@Override
	public void fit(final ILabeledDataset<? extends ILabeledInstance> dTrain) throws TrainingException, InterruptedException {
		Objects.requireNonNull(dTrain);
		if (dTrain.isEmpty()) {
			// BUG FIX: the message previously referred to a "majority classifier" (copy/paste from a classifier)
			throw new IllegalArgumentException("Cannot train constant regressor with empty training set.");
		}
		// BUG FIX: guard the empty average (all labels null), which previously surfaced
		// as an unhelpful NoSuchElementException from getAsDouble().
		this.constantValue = dTrain.stream().filter(x -> x.getLabel() != null).mapToDouble(x -> (double) x.getLabel()).average()
				.orElseThrow(() -> new IllegalArgumentException("Cannot train constant regressor: no instance has a non-null label."));
	}

	/** Predicts the constant learned in fit, regardless of the instance. */
	@Override
	public IRegressionPrediction predict(final ILabeledInstance xTest) throws PredictionException, InterruptedException {
		return new SingleTargetRegressionPrediction(this.constantValue);
	}

	/** Predicts the constant for each instance of the batch. */
	@Override
	public IRegressionResultBatch predict(final ILabeledInstance[] dTest) throws PredictionException, InterruptedException {
		List<IRegressionPrediction> preds = new ArrayList<>(dTest.length);
		for (ILabeledInstance i : dTest) {
			preds.add(this.predict(i));
		}
		return new SingleTargetRegressionPredictionBatch(preds);
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/learner/package-info.java
|
package ai.libs.jaicore.ml.regression.learner;
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/learner
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/learner/perceptron/package-info.java
|
package ai.libs.jaicore.ml.regression.learner.perceptron;
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss/ERegressionPerformanceMeasure.java
|
package ai.libs.jaicore.ml.regression.loss;
import java.util.List;
import org.api4.java.ai.ml.core.evaluation.IPredictionAndGroundTruthTable;
import org.api4.java.ai.ml.core.evaluation.supervised.loss.IDeterministicPredictionPerformanceMeasure;
import org.api4.java.ai.ml.regression.evaluation.IRegressionPrediction;
import ai.libs.jaicore.ml.regression.loss.dataset.MeanAbsoluteError;
import ai.libs.jaicore.ml.regression.loss.dataset.MeanAbsolutePercentageError;
import ai.libs.jaicore.ml.regression.loss.dataset.MeanSquaredError;
import ai.libs.jaicore.ml.regression.loss.dataset.R2;
import ai.libs.jaicore.ml.regression.loss.dataset.RootMeanSquaredError;
import ai.libs.jaicore.ml.regression.loss.dataset.RootMeanSquaredLogarithmError;
/**
 * Enumeration of standard regression performance measures; each constant delegates
 * its loss/score computation to the wrapped measure implementation.
 */
public enum ERegressionPerformanceMeasure implements IDeterministicPredictionPerformanceMeasure<Double, IRegressionPrediction> {

	MSE(new MeanSquaredError()), RMSE(new RootMeanSquaredError()), RMSLE(new RootMeanSquaredLogarithmError()), MAE(new MeanAbsoluteError()), MAPE(new MeanAbsolutePercentageError()), R2(new R2());

	// the concrete measure this constant delegates to
	private final IDeterministicPredictionPerformanceMeasure<Double, IRegressionPrediction> measure;

	/**
	 * @param measure the measure implementation backing this constant
	 */
	private ERegressionPerformanceMeasure(final IDeterministicPredictionPerformanceMeasure<Double, IRegressionPrediction> measure) {
		this.measure = measure;
	}

	// pure delegation below: forward every call to the wrapped measure
	@Override
	public double loss(final List<? extends Double> expected, final List<? extends IRegressionPrediction> predicted) {
		return this.measure.loss(expected, predicted);
	}

	@Override
	public double score(final List<? extends Double> expected, final List<? extends IRegressionPrediction> predicted) {
		return this.measure.score(expected, predicted);
	}

	@Override
	public double loss(final IPredictionAndGroundTruthTable<? extends Double, ? extends IRegressionPrediction> pairTable) {
		return this.measure.loss(pairTable);
	}

	@Override
	public double score(final IPredictionAndGroundTruthTable<? extends Double, ? extends IRegressionPrediction> pairTable) {
		return this.measure.score(pairTable);
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss/ERulPerformanceMeasure.java
|
package ai.libs.jaicore.ml.regression.loss;
import java.util.List;
import org.api4.java.ai.ml.core.evaluation.IPredictionAndGroundTruthTable;
import org.api4.java.ai.ml.core.evaluation.supervised.loss.IDeterministicPredictionPerformanceMeasure;
import org.api4.java.ai.ml.regression.evaluation.IRegressionPrediction;
import ai.libs.jaicore.ml.regression.loss.dataset.AsymmetricLoss;
import ai.libs.jaicore.ml.regression.loss.dataset.AsymmetricLoss2;
import ai.libs.jaicore.ml.regression.loss.dataset.LinearMeanSquaredError;
import ai.libs.jaicore.ml.regression.loss.dataset.MeanAbsoluteError;
import ai.libs.jaicore.ml.regression.loss.dataset.MeanAbsolutePercentageError;
import ai.libs.jaicore.ml.regression.loss.dataset.MeanAsymmetricLoss2;
import ai.libs.jaicore.ml.regression.loss.dataset.MeanPercentageError;
import ai.libs.jaicore.ml.regression.loss.dataset.MeanSquaredError;
import ai.libs.jaicore.ml.regression.loss.dataset.MeanSquaredLogarithmicMeanSquaredError;
import ai.libs.jaicore.ml.regression.loss.dataset.MeanSquaredPercentageError;
import ai.libs.jaicore.ml.regression.loss.dataset.QuadraticQuadraticError;
import ai.libs.jaicore.ml.regression.loss.dataset.RootMeanSquaredError;
import ai.libs.jaicore.ml.regression.loss.dataset.WeightedAbsoluteError;
import ai.libs.jaicore.ml.regression.loss.dataset.WeightedAsymmetricAbsoluteError;
/**
 * Enumeration of performance measures for remaining-useful-life (RUL) style regression;
 * each constant delegates its loss/score computation to the wrapped measure implementation.
 */
public enum ERulPerformanceMeasure implements IDeterministicPredictionPerformanceMeasure<Double, IRegressionPrediction> {

	ASYMMETRIC_LOSS(new AsymmetricLoss()), ASYMMETRIC_LOSS2(new AsymmetricLoss2()), MEAN_ASYMMETRIC_LOSS2(new MeanAsymmetricLoss2()), MEAN_PERCENTAGE_ERROR(new MeanPercentageError()),
	MEAN_ABSOLUTE_PERCENTAGE_ERROR(new MeanAbsolutePercentageError()), MEAN_SQUARED_PERCENTAGE_ERROR(new MeanSquaredPercentageError()), MEAN_ABSOLUTE_ERROR(new MeanAbsoluteError()), ROOT_MEAN_SQUARED_ERROR(new RootMeanSquaredError()),
	MEAN_SQUARED_ERROR(new MeanSquaredError()), WEIGHTED_ABSOLUTE_ERROR(new WeightedAbsoluteError()), WEIGHTED_ASYMMETRIC_ABSOLUTE_ERROR(new WeightedAsymmetricAbsoluteError()), LINEAR_MEAN_SQUARED_ERROR(new LinearMeanSquaredError()),
	MEAN_SQUARED_LOGARITHMIC_MEAN_SQUARED_ERROR(new MeanSquaredLogarithmicMeanSquaredError()), QUADRATIC_QUADRATIC_ERROR(new QuadraticQuadraticError());

	// the concrete measure this constant delegates to
	private final IDeterministicPredictionPerformanceMeasure<Double, IRegressionPrediction> measure;

	/**
	 * @param measure the measure implementation backing this constant
	 */
	private ERulPerformanceMeasure(final IDeterministicPredictionPerformanceMeasure<Double, IRegressionPrediction> measure) {
		this.measure = measure;
	}

	// pure delegation below: forward every call to the wrapped measure
	@Override
	public double loss(final List<? extends Double> expected, final List<? extends IRegressionPrediction> predicted) {
		return this.measure.loss(expected, predicted);
	}

	@Override
	public double loss(final IPredictionAndGroundTruthTable<? extends Double, ? extends IRegressionPrediction> pairTable) {
		return this.measure.loss(pairTable);
	}

	@Override
	public double score(final List<? extends Double> expected, final List<? extends IRegressionPrediction> predicted) {
		return this.measure.score(expected, predicted);
	}

	@Override
	public double score(final IPredictionAndGroundTruthTable<? extends Double, ? extends IRegressionPrediction> pairTable) {
		return this.measure.score(pairTable);
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss/dataset/ARegressionMeasure.java
|
package ai.libs.jaicore.ml.regression.loss.dataset;
import org.api4.java.ai.ml.core.evaluation.supervised.loss.IDeterministicPredictionPerformanceMeasure;
import org.api4.java.ai.ml.regression.evaluation.IRegressionPrediction;
import ai.libs.jaicore.ml.classification.loss.dataset.APredictionPerformanceMeasure;
/**
 * Common base type for regression performance measures: couples the generic
 * prediction-performance base class with the deterministic measure interface,
 * fixing ground truth to {@code Double} and predictions to {@code IRegressionPrediction}.
 */
public abstract class ARegressionMeasure extends APredictionPerformanceMeasure<Double, IRegressionPrediction> implements IDeterministicPredictionPerformanceMeasure<Double, IRegressionPrediction> {
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss/dataset/AUnboundedRegressionMeasure.java
|
package ai.libs.jaicore.ml.regression.loss.dataset;
import java.util.List;
import org.api4.java.ai.ml.regression.evaluation.IRegressionPrediction;
/**
 * Base class for unbounded regression measures, where score and loss are additive
 * inverses of each other (score = -loss).
 *
 * NOTE(review): a concrete subclass MUST override at least one of {@code loss} or
 * {@code score}; otherwise the two defaults below call each other in an infinite
 * mutual recursion (StackOverflowError).
 */
public abstract class AUnboundedRegressionMeasure extends ARegressionMeasure {

	// default: the loss is the negated score (for subclasses that implement score)
	@Override
	public double loss(final List<? extends Double> expected, final List<? extends IRegressionPrediction> predicted) {
		return -this.score(expected, predicted);
	}

	// default: the score is the negated loss (for subclasses that implement loss)
	@Override
	public double score(final List<? extends Double> expected, final List<? extends IRegressionPrediction> predicted) {
		return -this.loss(expected, predicted);
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss/dataset/AbsoluteError.java
|
package ai.libs.jaicore.ml.regression.loss.dataset;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.api4.java.ai.ml.regression.evaluation.IRegressionPrediction;
import ai.libs.jaicore.basic.StatisticsUtil;
public class AbsoluteError extends ARegressionMeasure {
private static final ai.libs.jaicore.ml.regression.loss.instance.AbsoluteError ABSOLUTE_ERROR_LOSS = new ai.libs.jaicore.ml.regression.loss.instance.AbsoluteError();
@Override
public double loss(final List<? extends Double> expected, final List<? extends IRegressionPrediction> predicted) {
return StatisticsUtil.sum(IntStream.range(0, expected.size()).mapToObj(x -> Double.valueOf(ABSOLUTE_ERROR_LOSS.loss(expected.get(x), predicted.get(x).getPrediction()))).collect(Collectors.toList()));
}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss/dataset/AsymmetricLoss.java
|
package ai.libs.jaicore.ml.regression.loss.dataset;
import java.util.ArrayList;
import java.util.List;
import org.api4.java.ai.ml.regression.evaluation.IRegressionPrediction;
import ai.libs.jaicore.basic.StatisticsUtil;
public class AsymmetricLoss extends ARegressionMeasure {

	// divisors controlling how quickly accuracy decays for over-/underestimation
	private double dividerOverestimation = 5;
	private double dividerUnderestimation = 20;

	public AsymmetricLoss() {
	}

	/**
	 * @param dividerUnderestimation decay divisor applied to underestimations
	 * @param dividerOverestimation decay divisor applied to overestimations
	 */
	public AsymmetricLoss(final double dividerUnderestimation, final double dividerOverestimation) {
		// BUG FIX: the two parameters were previously cross-assigned
		// (underestimation -> overestimation field and vice versa).
		this.dividerUnderestimation = dividerUnderestimation;
		this.dividerOverestimation = dividerOverestimation;
	}

	/**
	 * Asymmetric loss based on the signed percentage error: overestimations
	 * (negative percentage error) and underestimations decay with different divisors.
	 *
	 * @return 1 minus the mean per-instance accuracy
	 */
	@Override
	public double loss(final List<? extends Double> expected, final List<? extends IRegressionPrediction> predicted) {
		this.checkConsistency(expected, predicted);
		List<Double> accuracyList = new ArrayList<>();
		for (int i = 0; i < expected.size(); i++) {
			// NOTE(review): a ground-truth value of 0 yields an infinite/NaN percentage error
			Double percentageError = 100 * ((expected.get(i) - predicted.get(i).getPrediction()) / expected.get(i));
			Double accuracy;
			if (percentageError <= 0) {
				accuracy = Math.exp(-Math.log(0.5) * (percentageError / this.dividerOverestimation));
			} else {
				accuracy = Math.exp(Math.log(0.5) * (percentageError / this.dividerUnderestimation));
			}
			accuracyList.add(accuracy);
		}
		return 1 - StatisticsUtil.mean(accuracyList);
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss/dataset/AsymmetricLoss2.java
|
package ai.libs.jaicore.ml.regression.loss.dataset;
import java.util.List;
import org.api4.java.ai.ml.regression.evaluation.IRegressionPrediction;
/**
 * Exponential asymmetric penalty on the signed prediction error, summed over all
 * instances: underestimations and overestimations grow with different divisors.
 *
 * NOTE(review): the accumulated exponential penalty is returned from {@code score},
 * although larger values indicate worse predictions — confirm the intended
 * score/loss orientation against the base class.
 */
public class AsymmetricLoss2 extends ARegressionMeasure {

	// divisors controlling how quickly the exponential penalty grows
	private double dividerUnderestimation = 10;
	private double dividerOverestimation = 13;

	public AsymmetricLoss2() {
	}

	public AsymmetricLoss2(final double dividerUnderestimation, final double dividerOverestimation) {
		this.dividerUnderestimation = dividerUnderestimation;
		this.dividerOverestimation = dividerOverestimation;
	}

	@Override
	public double score(final List<? extends Double> expected, final List<? extends IRegressionPrediction> predicted) {
		this.checkConsistency(expected, predicted);
		double loss = 0;
		for (int i = 0; i < expected.size(); i++) {
			// signed error: negative = underestimation, positive = overestimation
			Double difference = predicted.get(i).getPrediction() - expected.get(i);
			if (difference < 0) {
				loss += Math.exp(-(difference / this.dividerUnderestimation)) - 1;
			} else {
				loss += Math.exp(difference / this.dividerOverestimation) - 1;
			}
		}
		return loss;
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss/dataset/LinearMeanSquaredError.java
|
package ai.libs.jaicore.ml.regression.loss.dataset;
import java.util.ArrayList;
import java.util.List;
import org.api4.java.ai.ml.regression.evaluation.IRegressionPrediction;
import ai.libs.jaicore.basic.StatisticsUtil;
/**
 * Asymmetric error: underestimations are penalized linearly (weighted by
 * {@code weightUnderestimation}) while every overestimation contributes the dataset's
 * overall mean squared error as a constant penalty; the result is the mean of these
 * per-instance penalties.
 */
public class LinearMeanSquaredError extends AUnboundedRegressionMeasure {

	// scale factor for the linear underestimation penalty
	private double weightUnderestimation = 1;

	public LinearMeanSquaredError() {
	}

	public LinearMeanSquaredError(final double weightUnderestimation) {
		this.weightUnderestimation = weightUnderestimation;
	}

	@Override
	public double loss(final List<? extends Double> expected, final List<? extends IRegressionPrediction> predicted) {
		List<Double> errors = new ArrayList<>();
		// computed once over the whole dataset and reused as the overestimation penalty
		double mse = new MeanSquaredError().loss(expected, predicted);
		for (int i = 0; i < expected.size(); i++) {
			// signed error: non-positive = underestimation (or exact hit)
			Double difference = predicted.get(i).getPrediction() - expected.get(i);
			Double error;
			if (difference <= 0) {
				error = -this.weightUnderestimation * difference;
			} else {
				error = mse;
			}
			errors.add(error);
		}
		return StatisticsUtil.mean(errors);
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss/dataset/MeanAbsoluteError.java
|
package ai.libs.jaicore.ml.regression.loss.dataset;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.api4.java.ai.ml.regression.evaluation.IRegressionPrediction;
import ai.libs.jaicore.basic.StatisticsUtil;
import ai.libs.jaicore.ml.regression.loss.instance.AbsoluteError;
public class MeanAbsoluteError extends AUnboundedRegressionMeasure {

	// instance-wise absolute-error loss; applied pairwise and averaged over the dataset
	private static final AbsoluteError ABSOLUTE_ERROR_LOSS = new AbsoluteError();

	/**
	 * Mean of the per-instance absolute errors between ground truth and prediction.
	 *
	 * @param expected ground-truth values
	 * @param predicted predictions, aligned index-wise with {@code expected}
	 */
	@Override
	public double loss(final List<? extends Double> expected, final List<? extends IRegressionPrediction> predicted) {
		// CONSISTENCY FIX: validate list alignment like the sibling measures (e.g. MeanSquaredError) do
		this.checkConsistency(expected, predicted);
		return StatisticsUtil.mean(IntStream.range(0, expected.size()).mapToObj(x -> Double.valueOf(ABSOLUTE_ERROR_LOSS.loss(expected.get(x), predicted.get(x).getPrediction()))).collect(Collectors.toList()));
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss/dataset/MeanAbsolutePercentageError.java
|
package ai.libs.jaicore.ml.regression.loss.dataset;
import java.util.ArrayList;
import java.util.List;
import org.api4.java.ai.ml.regression.evaluation.IRegressionPrediction;
import ai.libs.jaicore.basic.StatisticsUtil;
/**
 * Mean of the absolute relative errors |expected - predicted| / expected (MAPE,
 * expressed as a fraction, not multiplied by 100).
 */
public class MeanAbsolutePercentageError extends ARegressionMeasure {

	@Override
	public double loss(final List<? extends Double> expected, final List<? extends IRegressionPrediction> predicted) {
		this.checkConsistency(expected, predicted);
		List<Double> errors = new ArrayList<>();
		for (int i = 0; i < expected.size(); i++) {
			// NOTE(review): a ground-truth value of 0 yields an infinite/NaN error — confirm inputs are non-zero
			Double percentageError = (expected.get(i) - predicted.get(i).getPrediction()) / expected.get(i);
			errors.add(Math.abs(percentageError));
		}
		return StatisticsUtil.mean(errors);
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss/dataset/MeanAsymmetricLoss2.java
|
package ai.libs.jaicore.ml.regression.loss.dataset;
import java.util.ArrayList;
import java.util.List;
import org.api4.java.ai.ml.regression.evaluation.IRegressionPrediction;
import ai.libs.jaicore.basic.StatisticsUtil;
/**
 * Mean of the exponential asymmetric penalties on the signed prediction error:
 * underestimations and overestimations grow with different divisors.
 *
 * NOTE(review): the mean penalty is returned from {@code score}, although larger
 * values indicate worse predictions — confirm the intended score/loss orientation
 * against the base class (same oddity as in {@code AsymmetricLoss2}).
 */
public class MeanAsymmetricLoss2 extends ARegressionMeasure {

	// divisors controlling how quickly the exponential penalty grows
	private double dividerUnderestimation = 10;
	private double dividerOverestimation = 13;

	public MeanAsymmetricLoss2() {
	}

	public MeanAsymmetricLoss2(final double dividerUnderestimation, final double dividerOverestimation) {
		this.dividerUnderestimation = dividerUnderestimation;
		this.dividerOverestimation = dividerOverestimation;
	}

	@Override
	public double score(final List<? extends Double> expected, final List<? extends IRegressionPrediction> predicted) {
		this.checkConsistency(expected, predicted);
		List<Double> losses = new ArrayList<>();
		for (int i = 0; i < expected.size(); i++) {
			// signed error: negative = underestimation, positive = overestimation
			Double difference = predicted.get(i).getPrediction() - expected.get(i);
			double loss;
			if (difference < 0) {
				loss = Math.exp(-(difference / this.dividerUnderestimation)) - 1;
			} else {
				loss = Math.exp(difference / this.dividerOverestimation) - 1;
			}
			losses.add(loss);
		}
		return StatisticsUtil.mean(losses);
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss/dataset/MeanPercentageError.java
|
package ai.libs.jaicore.ml.regression.loss.dataset;
import java.util.ArrayList;
import java.util.List;
import org.api4.java.ai.ml.regression.evaluation.IRegressionPrediction;
import ai.libs.jaicore.basic.StatisticsUtil;
public class MeanPercentageError extends ARegressionMeasure {

	/**
	 * Mean of the signed relative errors (expected - predicted) / expected; positive and
	 * negative deviations can cancel each other out.
	 */
	@Override
	public double loss(final List<? extends Double> expected, final List<? extends IRegressionPrediction> predicted) {
		this.checkConsistency(expected, predicted);
		List<Double> relativeErrors = new ArrayList<>(expected.size());
		for (int i = 0; i < expected.size(); i++) {
			// NOTE(review): a ground-truth value of 0 yields an infinite/NaN error — confirm inputs are non-zero
			double truth = expected.get(i);
			relativeErrors.add((truth - predicted.get(i).getPrediction()) / truth);
		}
		return StatisticsUtil.mean(relativeErrors);
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss/dataset/MeanSquaredError.java
|
package ai.libs.jaicore.ml.regression.loss.dataset;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.api4.java.ai.ml.regression.evaluation.IRegressionPrediction;
import ai.libs.jaicore.basic.StatisticsUtil;
import ai.libs.jaicore.ml.regression.loss.instance.SquaredError;
/**
 * Mean of the per-instance squared errors between ground truth and prediction (MSE).
 */
public class MeanSquaredError extends AUnboundedRegressionMeasure {

	// instance-wise squared-error loss; applied pairwise and averaged over the dataset
	private static final SquaredError SQUARED_ERROR_LOSS = new SquaredError();

	@Override
	public double loss(final List<? extends Double> expected, final List<? extends IRegressionPrediction> predicted) {
		this.checkConsistency(expected, predicted);
		return StatisticsUtil.mean(IntStream.range(0, expected.size()).mapToObj(x -> Double.valueOf(SQUARED_ERROR_LOSS.loss(expected.get(x), predicted.get(x).getPrediction()))).collect(Collectors.toList()));
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss/dataset/MeanSquaredLogarithmicError.java
|
package ai.libs.jaicore.ml.regression.loss.dataset;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.api4.java.ai.ml.regression.evaluation.IRegressionPrediction;
import ai.libs.jaicore.basic.StatisticsUtil;
import ai.libs.jaicore.ml.regression.loss.instance.SquaredLogarithmicError;
public class MeanSquaredLogarithmicError extends ARegressionMeasure {
private static final SquaredLogarithmicError SQUARED_LOGARITHMIC_LOSS = new SquaredLogarithmicError();
@Override
public double loss(final List<? extends Double> expected, final List<? extends IRegressionPrediction> predicted) {
return StatisticsUtil.mean(IntStream.range(0, expected.size()).mapToObj(x -> Double.valueOf(SQUARED_LOGARITHMIC_LOSS.loss(expected.get(x), predicted.get(x).getPrediction()))).collect(Collectors.toList()));
}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss/dataset/MeanSquaredLogarithmicMeanSquaredError.java
|
package ai.libs.jaicore.ml.regression.loss.dataset;
import java.util.ArrayList;
import java.util.List;
import org.api4.java.ai.ml.regression.evaluation.IRegressionPrediction;
import ai.libs.jaicore.basic.StatisticsUtil;
/**
 * Asymmetric error combining two dataset-level aggregates: every underestimation
 * contributes the overall mean squared logarithmic error as a constant penalty, every
 * overestimation the overall mean squared error; the result is the mean of these
 * per-instance penalties.
 */
public class MeanSquaredLogarithmicMeanSquaredError extends AUnboundedRegressionMeasure {

	@Override
	public double loss(final List<? extends Double> expected, final List<? extends IRegressionPrediction> predicted) {
		List<Double> errors = new ArrayList<>();
		// both aggregates are computed once over the whole dataset and reused per instance
		double msle = new MeanSquaredLogarithmicError().loss(expected, predicted);
		double mse = new MeanSquaredError().loss(expected, predicted);
		for (int i = 0; i < expected.size(); i++) {
			// signed error: non-positive = underestimation (or exact hit)
			Double difference = predicted.get(i).getPrediction() - expected.get(i);
			Double error;
			if (difference <= 0) {
				error = msle;
			} else {
				error = mse;
			}
			errors.add(error);
		}
		return StatisticsUtil.mean(errors);
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss/dataset/MeanSquaredPercentageError.java
|
package ai.libs.jaicore.ml.regression.loss.dataset;
import java.util.ArrayList;
import java.util.List;
import org.api4.java.ai.ml.regression.evaluation.IRegressionPrediction;
import ai.libs.jaicore.basic.StatisticsUtil;
public class MeanSquaredPercentageError extends ARegressionMeasure {

	/**
	 * Mean of the squared relative errors ((expected - predicted) / expected)^2.
	 */
	@Override
	public double loss(final List<? extends Double> expected, final List<? extends IRegressionPrediction> predicted) {
		this.checkConsistency(expected, predicted);
		List<Double> squaredRelativeErrors = new ArrayList<>(expected.size());
		for (int i = 0; i < expected.size(); i++) {
			// NOTE(review): a ground-truth value of 0 yields an infinite/NaN error — confirm inputs are non-zero
			double relativeError = (expected.get(i) - predicted.get(i).getPrediction()) / expected.get(i);
			squaredRelativeErrors.add(Math.pow(relativeError, 2));
		}
		return StatisticsUtil.mean(squaredRelativeErrors);
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss/dataset/QuadraticQuadraticError.java
|
package ai.libs.jaicore.ml.regression.loss.dataset;
import java.util.ArrayList;
import java.util.List;
import org.api4.java.ai.ml.regression.evaluation.IRegressionPrediction;
import ai.libs.jaicore.basic.StatisticsUtil;
/**
 * Quadratic penalty on both sides of the error with asymmetric weights: underestimations
 * are weighted by {@code weightUnderestimation}, overestimations by its complement; the
 * result is the mean of the per-instance penalties.
 *
 * NOTE(review): the overestimation coefficient
 * {@code 2 * (w + (1 - 2 * w))} simplifies algebraically to {@code 2 * (1 - w)} — verify
 * the formula against its source before refactoring.
 */
public class QuadraticQuadraticError extends AUnboundedRegressionMeasure {

	// weight applied to the underestimation branch
	private double weightUnderestimation = 1d;

	public QuadraticQuadraticError() {
	}

	public QuadraticQuadraticError(final double weightUnderestimation) {
		this.weightUnderestimation = weightUnderestimation;
	}

	@Override
	public double loss(final List<? extends Double> expected, final List<? extends IRegressionPrediction> predicted) {
		List<Double> errors = new ArrayList<>();
		for (int i = 0; i < expected.size(); i++) {
			// signed error: non-positive = underestimation (or exact hit)
			double difference = predicted.get(i).getPrediction() - expected.get(i);
			Double error;
			if (difference <= 0) {
				error = 2 * this.weightUnderestimation * Math.pow(difference, 2);
			} else {
				error = 2 * (this.weightUnderestimation + (1 - (2 * this.weightUnderestimation))) * Math.pow(difference, 2);
			}
			errors.add(error);
		}
		return StatisticsUtil.mean(errors);
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss/dataset/R2.java
|
package ai.libs.jaicore.ml.regression.loss.dataset;
import java.util.List;
import org.api4.java.ai.ml.regression.evaluation.IRegressionPrediction;
/**
 * The R^2, aka. the coefficient of determination describes the proportion of the variance in the target variable and the predicted values.
 * The formula of R^2 as implemented here is:
 * (\sum_i (\hat{y}_i - \bar{y})^2) / (\sum_i (y_i - \bar{y})^2)
 *
 * For predictions not worse than predicting constantly the mean of the target variable, R^2 resides within the [0, 1] interval.
 * Caution: For worse predictions the coefficient of determination becomes *negative*.
 *
 * NOTE(review): this is the explained-variance form SSreg/SStot; it coincides with the
 * classical definition 1 - SSres/SStot only for predictions of an OLS fit with intercept —
 * confirm which definition is intended.
 *
 * @author mwever
 */
public class R2 extends ARegressionMeasure {

	public R2() {
		super();
		// nothing to do here
	}

	/**
	 * Computes the coefficient of determination of the given predictions.
	 *
	 * @param expected the ground-truth target values
	 * @param predicted the predictions, aligned index-wise with {@code expected}
	 * @return the ratio of explained variance to total variance
	 * @throws IllegalStateException if the expected values have zero variance (constant target),
	 *         since the denominator would be zero
	 */
	@Override
	public double score(final List<? extends Double> expected, final List<? extends IRegressionPrediction> predicted) {
		this.checkConsistency(expected, predicted);
		double meanExpected = expected.stream().mapToDouble(x -> x).average().getAsDouble();
		double sumOfActualSquares = 0.0;
		double sumOfExpectedSquares = 0.0;
		for (int i = 0; i < predicted.size(); i++) {
			sumOfActualSquares += Math.pow(predicted.get(i).getPrediction() - meanExpected, 2);
			sumOfExpectedSquares += Math.pow(expected.get(i) - meanExpected, 2);
		}
		if (sumOfExpectedSquares == 0.0) {
			// Fixed message: the failure condition is a zero denominator, not a null value.
			throw new IllegalStateException("Sum of expected squares must not be zero.");
		}
		return sumOfActualSquares / sumOfExpectedSquares;
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss/dataset/RootMeanSquaredError.java
|
package ai.libs.jaicore.ml.regression.loss.dataset;
import java.util.List;
import org.api4.java.ai.ml.regression.evaluation.IRegressionPrediction;
/**
 * The root mean squared error (RMSE): the square root of the mean of the squared
 * differences of expected/actual pairs.
 *
 * @author mwever
 *
 */
public class RootMeanSquaredError extends ARegressionMeasure {

	// Delegate computing the mean squared error whose root this measure returns.
	private static final MeanSquaredError MEAN_SQUARED_ERROR_LOSS = new MeanSquaredError();

	@Override
	public double loss(final List<? extends Double> expected, final List<? extends IRegressionPrediction> predicted) {
		final double meanSquaredError = MEAN_SQUARED_ERROR_LOSS.loss(expected, predicted);
		return Math.sqrt(meanSquaredError);
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss/dataset/RootMeanSquaredLogarithmError.java
|
package ai.libs.jaicore.ml.regression.loss.dataset;
import java.util.List;
import java.util.stream.IntStream;
import org.api4.java.ai.ml.regression.evaluation.IRegressionPrediction;
/**
 * Root mean squared logarithmic error (RMSLE):
 * sqrt(mean((log(1 + yhat_i) - log(1 + y_i))^2)).
 *
 * Fix: the previous implementation computed log(yhat_i - y_i), i.e. the logarithm of
 * the (possibly negative) residual, which yields NaN whenever a prediction underestimates
 * the target. RMSLE is defined on the *difference of logarithms*, computed here via
 * {@link Math#log1p(double)} as in the standard definition.
 *
 * NOTE(review): values below -1 still produce NaN from log1p — confirm targets and
 * predictions are non-negative, as RMSLE assumes.
 */
public class RootMeanSquaredLogarithmError extends ARegressionMeasure {

	public RootMeanSquaredLogarithmError() {
		super();
	}

	/**
	 * Computes the RMSLE of the given predictions.
	 *
	 * @param expected the ground-truth target values
	 * @param predicted the predictions, aligned index-wise with {@code expected}
	 * @return the root mean squared logarithmic error
	 */
	@Override
	public double score(final List<? extends Double> expected, final List<? extends IRegressionPrediction> predicted) {
		this.checkConsistency(expected, predicted);
		return Math.sqrt(IntStream.range(0, expected.size()) //
				.mapToDouble(i -> Math.log1p(predicted.get(i).getPrediction()) - Math.log1p(expected.get(i))) // log error
				.map(x -> Math.pow(x, 2)) // squared
				.average().getAsDouble() // mean
		); // root
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss/dataset/SquaredError.java
|
package ai.libs.jaicore.ml.regression.loss.dataset;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.api4.java.ai.ml.regression.evaluation.IRegressionPrediction;
import ai.libs.jaicore.basic.StatisticsUtil;
public class SquaredError extends ARegressionMeasure {
private static final ai.libs.jaicore.ml.regression.loss.instance.SquaredError SQUARED_ERROR_LOSS = new ai.libs.jaicore.ml.regression.loss.instance.SquaredError();
@Override
public double loss(final List<? extends Double> expected, final List<? extends IRegressionPrediction> predicted) {
return StatisticsUtil.mean(IntStream.range(0, expected.size()).mapToObj(x -> Double.valueOf(SQUARED_ERROR_LOSS.loss(expected.get(x), predicted.get(x).getPrediction()))).collect(Collectors.toList()));
}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss/dataset/WeightedAbsoluteError.java
|
package ai.libs.jaicore.ml.regression.loss.dataset;
import java.util.ArrayList;
import java.util.List;
import org.api4.java.ai.ml.regression.evaluation.IRegressionPrediction;
import ai.libs.jaicore.basic.StatisticsUtil;
/**
 * Absolute error scaled by a single weight factor.
 *
 * NOTE(review): the same weight is applied to both under- and overestimations in the
 * original branches (-w*d for d<=0, w*d for d>0), so the loss reduces to
 * w * |residual| — confirm whether overestimations were meant to carry a different weight
 * (compare WeightedAsymmetricAbsoluteError).
 */
public class WeightedAbsoluteError extends AUnboundedRegressionMeasure {

	// Weight applied to the absolute residual; defaults to 1.
	private double weightUnderestimation = 1d;

	public WeightedAbsoluteError() {
		// keep the default weight of 1
	}

	public WeightedAbsoluteError(final double weightUnderestimation) {
		this.weightUnderestimation = weightUnderestimation;
	}

	@Override
	public double loss(final List<? extends Double> expected, final List<? extends IRegressionPrediction> predicted) {
		final List<Double> instanceLosses = new ArrayList<>(expected.size());
		for (int index = 0; index < expected.size(); index++) {
			final double residual = predicted.get(index).getPrediction() - expected.get(index);
			// Both branches of the original reduce to weight * |residual|.
			instanceLosses.add(this.weightUnderestimation * Math.abs(residual));
		}
		return StatisticsUtil.mean(instanceLosses);
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss/dataset/WeightedAsymmetricAbsoluteError.java
|
package ai.libs.jaicore.ml.regression.loss.dataset;
import java.util.ArrayList;
import java.util.List;
import org.api4.java.ai.ml.regression.evaluation.IRegressionPrediction;
import ai.libs.jaicore.basic.StatisticsUtil;
/**
 * Absolute error with asymmetric weighting: underestimations (predicted <= expected)
 * and overestimations (predicted > expected) are scaled by separate weights.
 */
public class WeightedAsymmetricAbsoluteError extends AUnboundedRegressionMeasure {

	// Weight applied when the prediction falls below (or hits) the target; defaults to 1.
	private double weightUnderestimation = 1;
	// Weight applied when the prediction exceeds the target; defaults to 1.
	private double weightOverestimation = 1;

	public WeightedAsymmetricAbsoluteError() {
		// keep both default weights of 1
	}

	public WeightedAsymmetricAbsoluteError(final double weightUnderestimation, final double weightOverestimation) {
		this.weightUnderestimation = weightUnderestimation;
		this.weightOverestimation = weightOverestimation;
	}

	@Override
	public double loss(final List<? extends Double> expected, final List<? extends IRegressionPrediction> predicted) {
		final List<Double> instanceLosses = new ArrayList<>(expected.size());
		for (int index = 0; index < expected.size(); index++) {
			final double residual = predicted.get(index).getPrediction() - expected.get(index);
			// Negate the underestimation weight so the contribution is always non-negative.
			final double weight = (residual <= 0) ? -this.weightUnderestimation : this.weightOverestimation;
			instanceLosses.add(weight * residual);
		}
		return StatisticsUtil.mean(instanceLosses);
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss/instance/AbsoluteError.java
|
package ai.libs.jaicore.ml.regression.loss.instance;
import ai.libs.jaicore.ml.classification.loss.instance.AInstanceMeasure;
/**
 * Instance-level absolute error |expected - predicted|; averaging it over a dataset
 * yields the mean absolute error.
 */
public class AbsoluteError extends AInstanceMeasure<Double, Double> {

	@Override
	public double loss(final Double expected, final Double predicted) {
		final double signedError = expected - predicted;
		return Math.abs(signedError);
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss/instance/SquaredError.java
|
package ai.libs.jaicore.ml.regression.loss.instance;
import ai.libs.jaicore.ml.classification.loss.instance.AInstanceMeasure;
/**
 * Measure computing the squared error (expected - predicted)^2 of two doubles.
 * It can be used to compute the mean squared error.
 *
 * @author mwever
 */
public class SquaredError extends AInstanceMeasure<Double, Double> {

	@Override
	public double loss(final Double expected, final Double predicted) {
		final double signedError = expected - predicted;
		return Math.pow(signedError, 2);
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/loss/instance/SquaredLogarithmicError.java
|
package ai.libs.jaicore.ml.regression.loss.instance;
import ai.libs.jaicore.ml.classification.loss.instance.AInstanceMeasure;
/**
 * Instance-level squared logarithmic error: (log(expected) - log(predicted))^2,
 * where an exact 0 is replaced by a small epsilon so that the logarithm stays finite.
 *
 * NOTE(review): negative inputs are not clipped and yield NaN from Math.log —
 * confirm that inputs are non-negative.
 */
public class SquaredLogarithmicError extends AInstanceMeasure<Double, Double> {

	// Default fallback substituted for an exact 0 before taking the logarithm.
	private static final double DEF_EPSILON = 1E-15;

	private final double epsilon;

	public SquaredLogarithmicError(final double epsilon) {
		this.epsilon = epsilon;
	}

	public SquaredLogarithmicError() {
		this(DEF_EPSILON);
	}

	@Override
	public double loss(final Double expected, final Double predicted) {
		final double logDifference = Math.log(this.clip(expected)) - Math.log(this.clip(predicted));
		return Math.pow(logDifference, 2);
	}

	// Replaces an exact zero by epsilon; every other value (including negatives) passes through unchanged.
	private double clip(final double value) {
		return (value == 0.0) ? this.epsilon : value;
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/singlelabel/SingleTargetRegressionPrediction.java
|
package ai.libs.jaicore.ml.regression.singlelabel;
import org.api4.java.ai.ml.regression.evaluation.IRegressionPrediction;
import ai.libs.jaicore.ml.core.evaluation.Prediction;
/**
 * A prediction for a single regression target, exposing the predicted value both as a
 * boxed {@link Double} and as a primitive double.
 */
public class SingleTargetRegressionPrediction extends Prediction implements IRegressionPrediction {

	public SingleTargetRegressionPrediction(final Object predicted) {
		super(predicted);
	}

	@Override
	public double getDoublePrediction() {
		// The raw prediction object stored in the superclass is expected to be a (boxed) Double.
		return (double) super.getPrediction();
	}

	@Override
	public Double getPrediction() {
		return this.getDoublePrediction();
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/regression/singlelabel/SingleTargetRegressionPredictionBatch.java
|
package ai.libs.jaicore.ml.regression.singlelabel;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.api4.java.ai.ml.regression.evaluation.IRegressionPrediction;
import org.api4.java.ai.ml.regression.evaluation.IRegressionResultBatch;
/**
 * A batch of single-target regression predictions, backed directly by an {@link ArrayList}
 * of the individual predictions.
 */
public class SingleTargetRegressionPredictionBatch extends ArrayList<IRegressionPrediction> implements IRegressionResultBatch {

	private static final long serialVersionUID = 1L;

	public SingleTargetRegressionPredictionBatch(final Collection<IRegressionPrediction> predictions) {
		this.addAll(predictions);
	}

	@Override
	public int getNumPredictions() {
		// The list itself holds the predictions, so its size is the batch size.
		return this.size();
	}

	@Override
	public List<? extends IRegressionPrediction> getPredictions() {
		// The batch is its own prediction list.
		return this;
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/scikitwrapper/AScikitLearnWrapper.java
|
package ai.libs.jaicore.ml.scikitwrapper;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import org.aeonbits.owner.ConfigCache;
import org.aeonbits.owner.ConfigFactory;
import org.apache.commons.lang3.ArrayUtils;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance;
import org.api4.java.ai.ml.core.evaluation.IPrediction;
import org.api4.java.ai.ml.core.evaluation.IPredictionBatch;
import org.api4.java.ai.ml.core.exception.DatasetCreationException;
import org.api4.java.ai.ml.core.exception.PredictionException;
import org.api4.java.ai.ml.core.exception.TrainingException;
import org.api4.java.algorithm.Timeout;
import org.jtwig.JtwigModel;
import org.jtwig.JtwigTemplate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.hash.Hashing;
import ai.libs.jaicore.basic.FileUtil;
import ai.libs.jaicore.basic.ResourceUtil;
import ai.libs.jaicore.ml.core.EScikitLearnProblemType;
import ai.libs.jaicore.ml.core.dataset.serialization.ArffDatasetAdapter;
import ai.libs.jaicore.ml.core.learner.ASupervisedLearner;
import ai.libs.jaicore.processes.EOperatingSystem;
import ai.libs.jaicore.processes.ProcessIDNotRetrievableException;
import ai.libs.jaicore.processes.ProcessUtil;
import ai.libs.python.DefaultProcessListener;
import ai.libs.python.IPythonConfig;
import ai.libs.python.PythonRequirementDefinition;
/**
 * Abstract base for wrappers that execute a scikit-learn pipeline in an external Python
 * process. The wrapper renders a Python script from a Jtwig template, serializes datasets
 * to ARFF files in a temp folder, launches Python via {@link ProcessBuilder}, and parses
 * the JSON result files written by the script. Dataset and script files are identified by
 * hash-based names so repeated fits/predictions on identical data reuse existing files.
 *
 * @param <P> the single-prediction type produced by this learner
 * @param <B> the prediction-batch type produced by this learner
 */
public abstract class AScikitLearnWrapper<P extends IPrediction, B extends IPredictionBatch> extends ASupervisedLearner<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>, P, B> implements IScikitLearnWrapper {

	/* Minimum Python version required to run the generated scripts (3.5.0). */
	public static final int PYTHON_MINIMUM_REQUIRED_VERSION_REL = 3;
	public static final int PYTHON_MINIMUM_REQUIRED_VERSION_MAJ = 5;
	public static final int PYTHON_MINIMUM_REQUIRED_VERSION_MIN = 0;

	/* Python modules that must be importable; problem-type-specific modules are appended in the constructor. */
	protected static final String[] PYTHON_REQUIRED_MODULES = { "arff", "numpy", "json", "pickle", "os", "sys", "warnings", "scipy", "sklearn", "pandas" };
	protected static final String[] PYTHON_OPTIONAL_MODULES = {};

	/* Classpath resource of the Jtwig template the Python script is rendered from. */
	private static final String SCIKIT_LEARN_TEMPLATE = "sklearn/sklearn_template.twig.py";
	private static final String COULD_NOT_RUN_SCIKIT_LEARN_MODEL = "Could not run scikit-learn model.";

	protected Logger logger = LoggerFactory.getLogger(AScikitLearnWrapper.class);
	protected IScikitLearnWrapperConfig scikitLearnWrapperConfig;
	protected IPythonConfig pythonConfig = ConfigFactory.create(IPythonConfig.class);

	/* Hash of the pipeline description; used to name the script, model, and output files. */
	protected final String configurationUID;

	protected EScikitLearnProblemType problemType;
	/* The scikit-learn pipeline construction code injected into the script template. */
	protected String pipeline;
	/* Additional Python import statements injected into the script template. */
	private String imports;
	protected File modelFile;
	/* Empty copy of the training data's schema; used to build datasets for instance-wise prediction. */
	protected ILabeledDataset<ILabeledInstance> data;
	protected int[] targetIndices;
	protected long seed;
	protected Timeout timeout;
	private boolean listenToPidFromProcess; // If true, the PID is obtained from the python process being started by listening to according output.

	/**
	 * Creates a wrapper for the given problem type and pipeline, checks the Python
	 * environment, prepares the temp directories, and renders the Python script.
	 *
	 * @param problemType the scikit-learn problem type (determines extra required modules and CLI flag)
	 * @param pipeline the Python code constructing the scikit-learn pipeline
	 * @param imports additional Python import statements needed by the pipeline
	 * @throws IOException if the script file cannot be written
	 * @throws InterruptedException if the Python requirement check is interrupted
	 */
	protected AScikitLearnWrapper(final EScikitLearnProblemType problemType, final String pipeline, final String imports) throws IOException, InterruptedException {
		this.problemType = problemType;
		this.pipeline = pipeline;
		this.imports = imports;
		this.targetIndices = new int[0];
		// Derive a filesystem-safe UID from the pipeline hash; a leading '-' is replaced, otherwise '0' is prepended.
		String hashCode = Hashing.sha256().hashString(this.pipeline, StandardCharsets.UTF_8).toString();
		this.configurationUID = hashCode.startsWith("-") ? hashCode.replace("-", "1") : "0" + hashCode;
		// On Mac/Linux the PID must be read from the process output rather than the OS handle.
		this.listenToPidFromProcess = (ProcessUtil.getOS() == EOperatingSystem.MAC || ProcessUtil.getOS() == EOperatingSystem.LINUX);
		this.scikitLearnWrapperConfig = ConfigCache.getOrCreate(IScikitLearnWrapperConfig.class);
		this.scikitLearnWrapperConfig.getTempFolder().mkdirs();
		this.scikitLearnWrapperConfig.getModelDumpsDirectory().mkdirs();
		// Verify the configured Python interpreter satisfies the version and module requirements.
		new PythonRequirementDefinition(PYTHON_MINIMUM_REQUIRED_VERSION_REL, PYTHON_MINIMUM_REQUIRED_VERSION_MAJ, PYTHON_MINIMUM_REQUIRED_VERSION_MIN, ArrayUtils.addAll(PYTHON_REQUIRED_MODULES, problemType.getPythonRequiredModules()),
				ArrayUtils.addAll(PYTHON_OPTIONAL_MODULES, problemType.getPythonOptionalModules())).check(this.pythonConfig);
		this.setPythonTemplate(ResourceUtil.getResourceAsTempFile(SCIKIT_LEARN_TEMPLATE));
	}

	/**
	 * Renders the Python script for this wrapper from the given Jtwig template, injecting
	 * the configured imports and pipeline code.
	 *
	 * @param pythonTemplatePath path to the Jtwig template file
	 * @throws IOException if the script file cannot be created or written
	 */
	@Override
	public void setPythonTemplate(final String pythonTemplatePath) throws IOException {
		File scikitTemplate = new File(pythonTemplatePath);
		File scriptFile = this.getSKLearnScriptFile();
		if (!scriptFile.createNewFile()) {
			this.logger.debug("Script file for configuration UID {} already exists in {}", this.configurationUID, scriptFile.getAbsolutePath());
		}
		if (this.scikitLearnWrapperConfig.getDeleteFileOnExit()) {
			scriptFile.deleteOnExit();
		}
		if (this.pipeline == null || this.pipeline.isEmpty()) {
			throw new AssertionError("Pipeline command for learner must be stated.");
		}
		Map<String, Object> templateValues = new HashMap<>();
		templateValues.put("imports", this.imports != null ? this.imports : "");
		templateValues.put("pipeline", this.pipeline);
		JtwigTemplate template = JtwigTemplate.fileTemplate(scikitTemplate);
		JtwigModel model = JtwigModel.newModel(templateValues);
		template.render(model, new FileOutputStream(scriptFile));
	}

	@Override
	public void setModelPath(final String modelPath) throws IOException {
		this.modelFile = new File(modelPath);
	}

	@Override
	public File getModelPath() {
		return this.modelFile;
	}

	@Override
	public void setTargetIndices(final int... targetIndices) {
		this.targetIndices = targetIndices;
	}

	@Override
	public void setSeed(final long seed) {
		this.seed = seed;
	}

	@Override
	public void setTimeout(final Timeout timeout) {
		this.timeout = timeout;
	}

	/**
	 * Fits the pipeline on the given data: serializes the data to an ARFF file (unless it
	 * already exists) and runs the Python process in fit mode.
	 *
	 * @throws TrainingException if the labels do not fit the problem type or the process fails
	 */
	@SuppressWarnings("unchecked")
	@Override
	public void fit(final ILabeledDataset<? extends ILabeledInstance> trainingData) throws TrainingException, InterruptedException {
		try {
			String dataFileName = this.getDataName(trainingData);
			// Keep an empty copy of the schema for later instance-wise prediction.
			this.data = (ILabeledDataset<ILabeledInstance>) trainingData.createEmptyCopy();
			if (this.doLabelsFitToProblemType(this.data)) {
				File trainingDataFile = this.getOrWriteDataFile(trainingData, dataFileName);
				this.fit(trainingDataFile, dataFileName);
			} else {
				throw new TrainingException("The label of the given data " + trainingData.getRelationName() + " are not suitable for the selected problem type " + this.problemType.getName());
			}
		} catch (DatasetCreationException | ScikitLearnWrapperExecutionFailedException e) {
			throw new TrainingException(COULD_NOT_RUN_SCIKIT_LEARN_MODEL, e);
		}
	}

	/** Fits the pipeline on an already serialized dataset identified by its name. */
	@Override
	public void fit(final String trainingDataName) throws TrainingException, InterruptedException {
		File trainingDataFile = this.getDatasetFile(trainingDataName);
		this.fit(trainingDataFile, trainingDataName);
	}

	// Runs the Python process in fit mode unless an output file for this data already exists (result reuse).
	private void fit(final File trainingDataFile, final String trainingDataName) throws TrainingException, InterruptedException {
		try {
			File outputFile = this.getOutputFile(trainingDataName);
			if (!outputFile.exists()) {
				this.modelFile = new File(this.scikitLearnWrapperConfig.getModelDumpsDirectory(), this.getModelFileName(trainingDataName));
				String[] trainCommand = this.constructCommandLineParametersForFitMode(this.modelFile, trainingDataFile).toCommandArray();
				if (this.logger.isDebugEnabled()) {
					this.logger.debug("{} run train mode {}", Thread.currentThread().getName(), Arrays.toString(trainCommand));
				}
				this.runProcess(trainCommand);
			}
		} catch (ScikitLearnWrapperExecutionFailedException e) {
			throw new TrainingException(COULD_NOT_RUN_SCIKIT_LEARN_MODEL, e);
		}
	}

	/**
	 * Predicts labels for the given data: serializes it to an ARFF file (unless present)
	 * and runs the Python process in predict mode using the previously fitted model.
	 */
	@Override
	public B predict(final ILabeledDataset<? extends ILabeledInstance> testingData) throws PredictionException, InterruptedException {
		try {
			String testingDataName = this.getDataName(testingData);
			File testingDataFile = this.getOrWriteDataFile(testingData, testingDataName);
			this.logger.info("Prediction dataset serialized, now acquiring predictions.");
			return this.predict(testingDataFile, testingDataName);
		} catch (ScikitLearnWrapperExecutionFailedException e) {
			throw new PredictionException(COULD_NOT_RUN_SCIKIT_LEARN_MODEL, e);
		}
	}

	/** Predicts labels for an already serialized dataset identified by its name. */
	public B predict(final String testingDataName) throws PredictionException, InterruptedException {
		File testingDataFile = this.getDatasetFile(testingDataName);
		return this.predict(testingDataFile, testingDataName);
	}

	// Runs the Python process in predict mode unless the output file already exists, then parses it.
	private B predict(final File testingDataFile, final String testingDataName) throws PredictionException, InterruptedException {
		try {
			File outputFile = this.getOutputFile(testingDataName);
			if (!outputFile.exists()) {
				String[] testCommand = this.constructCommandLineParametersForPredictMode(this.modelFile, testingDataFile, outputFile).toCommandArray();
				if (this.logger.isDebugEnabled()) {
					this.logger.debug("Run test mode with {}", Arrays.toString(testCommand));
				}
				this.runProcess(testCommand);
			}
			return this.handleOutput(outputFile);
		} catch (ScikitLearnWrapperExecutionFailedException | TrainingException e) {
			throw new PredictionException(COULD_NOT_RUN_SCIKIT_LEARN_MODEL, e);
		}
	}

	/**
	 * Predicts labels for an array of instances by wrapping them into a dataset with the
	 * schema remembered from training. Requires {@link #fit} to have been called.
	 */
	@Override
	public B predict(final ILabeledInstance[] testingInstances) throws PredictionException, InterruptedException {
		Objects.requireNonNull(this.modelFile, "Model has not been trained.");
		Objects.requireNonNull(this.data, "Model has not been trained.");
		this.logger.info("Predicting {} instances.", testingInstances.length);
		ILabeledDataset<ILabeledInstance> testingData;
		try {
			testingData = this.data.createEmptyCopy();
		} catch (DatasetCreationException e1) {
			throw new PredictionException("Could not replicate labeled dataset instance", e1);
		}
		Arrays.stream(testingInstances).forEach(testingData::add);
		return this.predict(testingData);
	}

	/** Predicts the label of a single instance via the array-based predict method. */
	@SuppressWarnings("unchecked")
	@Override
	public P predict(final ILabeledInstance instance) throws PredictionException, InterruptedException {
		return (P) this.predict(new ILabeledInstance[] { instance }).get(0);
	}

	/**
	 * Fits on the training data and predicts the testing data in a single Python process
	 * invocation, avoiding a model dump/reload between the two phases.
	 */
	@SuppressWarnings("unchecked")
	@Override
	public B fitAndPredict(final ILabeledDataset<? extends ILabeledInstance> trainingData, final ILabeledDataset<? extends ILabeledInstance> testingData) throws TrainingException, PredictionException, InterruptedException {
		try {
			String trainingDataFileName = this.getDataName(trainingData);
			this.data = (ILabeledDataset<ILabeledInstance>) trainingData.createEmptyCopy();
			File trainingDataFile = this.getOrWriteDataFile(trainingData, trainingDataFileName);
			String testingDataFileName = this.getDataName(testingData);
			File testingDataFile = this.getOrWriteDataFile(testingData, testingDataFileName);
			this.logger.info("Prediction dataset serialized, now acquiring predictions.");
			return this.fitAndPredict(trainingDataFile, trainingDataFileName, testingDataFile, testingDataFileName);
		} catch (DatasetCreationException | ScikitLearnWrapperExecutionFailedException e) {
			throw new TrainingException(COULD_NOT_RUN_SCIKIT_LEARN_MODEL, e);
		}
	}

	/** File-based variant of {@link #fitAndPredict}; skips execution if both output files already exist. */
	public B fitAndPredict(final File trainingDataFile, final String trainingDataName, final File testingDataFile, final String testingDataName) throws TrainingException, PredictionException, InterruptedException {
		try {
			File trainingOutputFile = this.getOutputFile(trainingDataName);
			File testingOutputFile = this.getOutputFile(testingDataName);
			if (!trainingOutputFile.exists() && !testingOutputFile.exists()) {
				String[] fitAndPredictCommand = this.constructCommandLineParametersForFitAndPredictMode(trainingDataFile, testingDataFile, testingOutputFile).toCommandArray();
				if (this.logger.isDebugEnabled()) {
					this.logger.debug("{} run fitAndPredict mode {}", Thread.currentThread().getName(), Arrays.toString(fitAndPredictCommand));
				}
				this.runProcess(fitAndPredictCommand);
			}
			return this.handleOutput(testingOutputFile);
		} catch (ScikitLearnWrapperExecutionFailedException e) {
			throw new TrainingException(COULD_NOT_RUN_SCIKIT_LEARN_MODEL, e);
		}
	}

	/** Builds the pickle file name for the model trained on the given data file. */
	protected String getModelFileName(final String dataFileName) {
		return this.configurationUID + "_" + dataFileName + this.scikitLearnWrapperConfig.getPickleFileExtension();
	}

	/**
	 * Derives a filesystem-safe name for a dataset from its hash code; a leading '-' is
	 * replaced by '1', otherwise '0' is prepended (mirrors the configuration UID scheme).
	 */
	@Override
	public String getDataName(final ILabeledDataset<? extends ILabeledInstance> data) {
		String hash = "" + data.hashCode();
		hash = hash.startsWith("-") ? hash.replace("-", "1") : "0" + hash;
		return hash;
	}

	// Serializes the dataset to an ARFF file in the temp folder, reusing an existing file with the same name.
	private synchronized File getOrWriteDataFile(final ILabeledDataset<? extends ILabeledInstance> dataset, final String dataFileName) throws ScikitLearnWrapperExecutionFailedException {
		this.logger.debug("Serializing {}x{} dataset to {}", dataset.size(), dataset.getNumAttributes(), dataFileName);
		File dataFile = this.getDatasetFile(dataFileName);
		if (this.scikitLearnWrapperConfig.getDeleteFileOnExit()) {
			dataFile.deleteOnExit();
		}
		if (dataFile.exists()) {
			this.logger.debug("Reusing dataset: {}", dataFileName);
			return dataFile;
		}
		try {
			new ArffDatasetAdapter().serializeDataset(dataFile, dataset);
		} catch (IOException e1) {
			throw new ScikitLearnWrapperExecutionFailedException("Could not dump data file for prediction", e1);
		}
		this.logger.debug("Serializating completed.");
		return dataFile;
	}

	// Resolves the ARFF file in the temp folder for a dataset name.
	private synchronized File getDatasetFile(final String datasetName) {
		return new File(this.scikitLearnWrapperConfig.getTempFolder(), datasetName + ".arff");
	}

	/** @return whether the labels of the given data are compatible with this wrapper's problem type. */
	protected abstract boolean doLabelsFitToProblemType(final ILabeledDataset<? extends ILabeledInstance> data);

	/** Creates a command builder preconfigured with the problem-type flag and script file. */
	protected ScikitLearnWrapperCommandBuilder getCommandBuilder() {
		ScikitLearnWrapperCommandBuilder commandBuilder = new ScikitLearnWrapperCommandBuilder(this.problemType.getScikitLearnCommandLineFlag(), this.getSKLearnScriptFile());
		return this.getCommandBuilder(commandBuilder);
	}

	/** Applies the common settings (logger, seed, timeout, python config) to the given builder. */
	protected ScikitLearnWrapperCommandBuilder getCommandBuilder(final ScikitLearnWrapperCommandBuilder commandBuilder) {
		commandBuilder.withLogger(this.logger);
		commandBuilder.withSeed(this.seed);
		commandBuilder.withTimeout(this.timeout);
		if (this.pythonConfig != null) {
			commandBuilder.withPythonConfig(this.pythonConfig);
		}
		return commandBuilder;
	}

	/** Builds the command line for fit mode (model dump target plus training data). */
	protected ScikitLearnWrapperCommandBuilder constructCommandLineParametersForFitMode(final File modelFile, final File trainingDataFile) {
		ScikitLearnWrapperCommandBuilder commandBuilder = this.getCommandBuilder();
		commandBuilder.withFitMode();
		commandBuilder.withModelFile(modelFile);
		commandBuilder.withFitDataFile(trainingDataFile);
		commandBuilder.withTargetIndices(this.targetIndices);
		return commandBuilder;
	}

	/** Builds the command line for predict mode (existing model, test data, output target). */
	protected ScikitLearnWrapperCommandBuilder constructCommandLineParametersForPredictMode(final File modelFile, final File testingDataFile, final File outputFile) {
		ScikitLearnWrapperCommandBuilder commandBuilder = this.getCommandBuilder();
		commandBuilder.withPredictMode();
		commandBuilder.withModelFile(modelFile);
		commandBuilder.withPredictDataFile(testingDataFile);
		commandBuilder.withTargetIndices(this.targetIndices);
		commandBuilder.withPredictOutputFile(outputFile);
		return commandBuilder;
	}

	/** Builds the command line for combined fit-and-predict mode (no model file involved). */
	protected ScikitLearnWrapperCommandBuilder constructCommandLineParametersForFitAndPredictMode(final File trainingDataFile, final File testingDataFile, final File testingOutputFile) {
		ScikitLearnWrapperCommandBuilder commandBuilder = this.getCommandBuilder();
		commandBuilder.withFitAndPredictMode();
		commandBuilder.withFitDataFile(trainingDataFile);
		commandBuilder.withPredictDataFile(testingDataFile);
		commandBuilder.withPredictOutputFile(testingOutputFile);
		commandBuilder.withTargetIndices(this.targetIndices);
		return commandBuilder;
	}

	// Starts the Python process in the temp folder, streams its output through a listener,
	// and fails on any error output except convergence warnings, which are only logged.
	private void runProcess(final String[] commandLineParameters) throws InterruptedException, ScikitLearnWrapperExecutionFailedException {
		DefaultProcessListener listener = new DefaultProcessListener(this.listenToPidFromProcess);
		try {
			listener.setLoggerName(this.logger.getName() + ".python");
			this.logger.debug("Set logger name of listener to {}. Now starting python process.", listener.getLoggerName());
			if (this.logger.isDebugEnabled()) {
				String call = Arrays.toString(commandLineParameters).replace(",", "");
				this.logger.info("Starting process {}", call.substring(1, call.length() - 1));
			}
			ProcessBuilder processBuilder = new ProcessBuilder(commandLineParameters).directory(this.scikitLearnWrapperConfig.getTempFolder());
			Process process = processBuilder.start();
			this.logger.debug("Started process with PID: {}. Listener is {}", ProcessUtil.getPID(process), listener);
			this.logger.info("Attaching listener {} to process {}", listener, process);
			listener.listenTo(process);
			this.logger.info("Listener attached.");
			if (!listener.getErrorOutput().isEmpty()) {
				if (listener.getErrorOutput().toLowerCase().contains("convergence")) {
					// ignore convergence warning
					this.logger.warn("Learner {} could not converge. Consider increase number of iterations.", this.pipeline);
				} else {
					throw new ScikitLearnWrapperExecutionFailedException(COULD_NOT_RUN_SCIKIT_LEARN_MODEL);
				}
			}
		} catch (InterruptedException e) {
			throw e;
		} catch (ProcessIDNotRetrievableException e) {
			this.logger.warn("Could not retrieve process ID.");
		} catch (Exception e) {
			throw new ScikitLearnWrapperExecutionFailedException(COULD_NOT_RUN_SCIKIT_LEARN_MODEL, e);
		}
	}

	/** Resolves the result file in the model-dumps directory for the given data name. */
	@Override
	public File getOutputFile(final String dataName) {
		return new File(this.scikitLearnWrapperConfig.getModelDumpsDirectory(), this.configurationUID + "_" + dataName + this.scikitLearnWrapperConfig.getResultFileExtension());
	}

	/** Parses a result file written by the Python script into the batch type of this wrapper. */
	protected abstract B handleOutput(final File outputFile) throws PredictionException, TrainingException;

	/**
	 * Reads the JSON result file into a nested list of doubles; deletes the file afterwards
	 * if the config requests temp-file cleanup.
	 *
	 * @throws PredictionException if the file cannot be read or parsed
	 */
	@SuppressWarnings("unchecked")
	protected List<List<Double>> getRawPredictionResults(final File outputFile) throws PredictionException {
		String fileContent = "";
		List<List<Double>> rawLastPredictionResults;
		try {
			/* Parse the result */
			fileContent = FileUtil.readFileAsString(outputFile);
			if (this.scikitLearnWrapperConfig.getDeleteFileOnExit()) {
				Files.delete(outputFile.toPath());
			}
			ObjectMapper objMapper = new ObjectMapper();
			rawLastPredictionResults = objMapper.readValue(fileContent, List.class);
		} catch (IOException e) {
			throw new PredictionException("Could not read result file or parse the json content to a list.", e);
		}
		if (this.logger.isInfoEnabled()) {
			this.logger.info("{}", rawLastPredictionResults.stream().flatMap(List::stream).collect(Collectors.toList()));
		}
		return rawLastPredictionResults;
	}

	@Override
	public void setPythonConfig(final IPythonConfig pythonConfig) {
		this.pythonConfig = pythonConfig;
	}

	@Override
	public void setScikitLearnWrapperConfig(final IScikitLearnWrapperConfig scikitLearnWrapperConfig) {
		this.scikitLearnWrapperConfig = scikitLearnWrapperConfig;
	}

	/** Resolves the rendered Python script file for this configuration UID. */
	@Override
	public File getSKLearnScriptFile() {
		return new File(this.scikitLearnWrapperConfig.getTempFolder(), this.configurationUID + this.scikitLearnWrapperConfig.getPythonFileExtension());
	}

	@Override
	public File getModelFile() {
		return this.modelFile;
	}

	@Override
	public String getLoggerName() {
		return this.logger.getName();
	}

	@Override
	public void setLoggerName(final String name) {
		this.logger = LoggerFactory.getLogger(name);
	}

	@Override
	public String toString() {
		return this.pipeline;
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/scikitwrapper/IScikitLearnWrapper.java
|
package ai.libs.jaicore.ml.scikitwrapper;
import java.io.File;
import java.io.IOException;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance;
import org.api4.java.ai.ml.core.exception.TrainingException;
import org.api4.java.ai.ml.core.learner.ISupervisedLearner;
import org.api4.java.algorithm.Timeout;
import org.api4.java.common.control.ILoggingCustomizable;
import ai.libs.python.IPythonConfig;
/**
* Handles the execution of a scikit-learn pipeline in python and makes the according predictions available. A scikit-learn pipeline is a composition of one or multiple (ML) algorithms.
*
* @see <a href="https://scikit-learn.org/stable/modules/compose.html">scikit-learn: Pipelines and composite estimators</a>
*
* @author tornede
*
*/
public interface IScikitLearnWrapper extends ISupervisedLearner<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>>, ILoggingCustomizable {

	/** Renders the Python execution script from the given template file. */
	public void setPythonTemplate(final String pythonTemplatePath) throws IOException;

	/** Sets the path where the trained (pickled) model is stored. */
	public void setModelPath(final String modelPath) throws IOException;

	/** @return the path of the trained model file. */
	public File getModelPath();

	/** Sets the random seed passed to the Python process. */
	public void setSeed(final long seed);

	/** Sets the timeout for the external Python process. */
	public void setTimeout(final Timeout timeout);

	/** Fits the pipeline on an already serialized dataset identified by its name. */
	public void fit(final String trainingDataName) throws TrainingException, InterruptedException;

	/** Derives the (hash-based) file name under which the given dataset is serialized. */
	public String getDataName(final ILabeledDataset<? extends ILabeledInstance> data);

	/** @return the result file the Python process writes for the given dataset name. */
	public File getOutputFile(final String dataName);

	/** Sets the Python interpreter configuration to use. */
	public void setPythonConfig(final IPythonConfig pythonConfig) throws IOException, InterruptedException;

	/** Sets the wrapper configuration (temp folders, file extensions, cleanup behavior). */
	public void setScikitLearnWrapperConfig(final IScikitLearnWrapperConfig scikitLearnWrapperConfig);

	/** @return the rendered Python script file executed by this wrapper. */
	public File getSKLearnScriptFile();

	/** @return the trained model file. */
	public File getModelFile();

	/** Sets the indices of the target columns within the serialized dataset. */
	public void setTargetIndices(int... targetIndices);
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/scikitwrapper/IScikitLearnWrapperConfig.java
|
package ai.libs.jaicore.ml.scikitwrapper;
import java.io.File;
import org.aeonbits.owner.Config.Sources;
import ai.libs.python.IPythonConfig;
@Sources({ "file:conf/scikitlearn_wrapper.properties" })
public interface IScikitLearnWrapperConfig extends IPythonConfig {

	/** Property key for the temporary folder used by the wrapper. */
	public static final String K_TEMP_FOLDER = "sklearn.wrapper.temp.folder";
	/** Default value for {@link #K_TEMP_FOLDER}. */
	public static final String DEF_TEMP_FOLDER = "tmp";

	/**
	 * @return The file extension used for generated python scripts (default: {@code .py}).
	 */
	@Key("sklearn.wrapper.python.extension")
	@DefaultValue(".py")
	public String getPythonFileExtension();

	/**
	 * @return The file extension used for pickled model dumps (default: {@code .pcl}).
	 */
	@Key("sklearn.wrapper.pickle.extension")
	@DefaultValue(".pcl")
	public String getPickleFileExtension();

	/**
	 * @return The file extension used for prediction result files (default: {@code .json}).
	 */
	@Key("sklearn.wrapper.result.extension")
	@DefaultValue(".json")
	public String getResultFileExtension();

	/**
	 * @return Whether temporary files shall be deleted on JVM exit (default: {@code false}).
	 */
	@Key("sklearn.wrapper.temp.delete_on_exit")
	@DefaultValue("false")
	public boolean getDeleteFileOnExit();

	/**
	 * @return The temporary folder used by the wrapper.
	 */
	@Key(K_TEMP_FOLDER)
	@DefaultValue(DEF_TEMP_FOLDER)
	public File getTempFolder();

	/**
	 * @return The name of the sub-directory of the temp folder in which model dumps are stored.
	 */
	@Key("sklearn.wrapper.temp.dump_folder_name")
	@DefaultValue("model_dumps")
	public String getModelDumpsDirectoryName();

	/**
	 * @return The directory in which model dumps are stored, i.e. the dump folder inside the temp folder.
	 */
	default File getModelDumpsDirectory() {
		return new File(this.getTempFolder(), this.getModelDumpsDirectoryName());
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/scikitwrapper/ScikitLearnClassificationWrapper.java
|
package ai.libs.jaicore.ml.scikitwrapper;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.stream.Collectors;
import org.api4.java.ai.ml.classification.singlelabel.evaluation.ISingleLabelClassification;
import org.api4.java.ai.ml.classification.singlelabel.evaluation.ISingleLabelClassificationPredictionBatch;
import org.api4.java.ai.ml.core.dataset.schema.attribute.ICategoricalAttribute;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance;
import org.api4.java.ai.ml.core.exception.PredictionException;
import org.api4.java.ai.ml.core.exception.TrainingException;
import ai.libs.jaicore.ml.classification.singlelabel.SingleLabelClassification;
import ai.libs.jaicore.ml.classification.singlelabel.SingleLabelClassificationPredictionBatch;
import ai.libs.jaicore.ml.core.EScikitLearnProblemType;
/**
 * Wrapper executing a scikit-learn pipeline for single-label classification. The python process
 * writes its predictions to an output file, which is parsed into a batch of
 * {@link SingleLabelClassification} objects.
 */
public class ScikitLearnClassificationWrapper extends AScikitLearnWrapper<ISingleLabelClassification, ISingleLabelClassificationPredictionBatch> {

	/**
	 * Creates a wrapper for a scikit-learn classification pipeline.
	 *
	 * @param pipeline The constructor call describing the scikit-learn pipeline.
	 * @param imports The import statements required by the pipeline definition.
	 * @throws IOException Thrown if required files could not be handled.
	 * @throws InterruptedException Thrown if the executing thread was interrupted.
	 */
	public ScikitLearnClassificationWrapper(final String pipeline, final String imports) throws IOException, InterruptedException {
		super(EScikitLearnProblemType.CLASSIFICATION, pipeline, imports);
	}

	@Override
	protected boolean doLabelsFitToProblemType(final ILabeledDataset<? extends ILabeledInstance> data) {
		// classification requires a categorical target attribute
		return data.getLabelAttribute() instanceof ICategoricalAttribute;
	}

	@Override
	protected ISingleLabelClassificationPredictionBatch handleOutput(final File outputFile) throws PredictionException, TrainingException {
		List<List<Double>> predictions = this.getRawPredictionResults(outputFile);
		if (predictions.isEmpty()) {
			throw new PredictionException("Reading the output file lead to empty predictions.");
		}
		if (predictions.get(0).size() == 1) {
			// single-column output: each row carries the predicted class index
			int numClasses = ((ICategoricalAttribute) this.data.getLabelAttribute()).getLabels().size();
			List<SingleLabelClassification> labelPredictions = predictions.stream() //
					.flatMap(List::stream) //
					.map(label -> new SingleLabelClassification(numClasses, label.intValue())) //
					.collect(Collectors.toList());
			return new SingleLabelClassificationPredictionBatch(labelPredictions);
		}
		// multi-column output: each row carries one value per class (a class distribution)
		List<SingleLabelClassification> distributionPredictions = predictions.stream() //
				.map(row -> row.stream().mapToDouble(Double::doubleValue).toArray()) //
				.map(SingleLabelClassification::new) //
				.collect(Collectors.toList());
		return new SingleLabelClassificationPredictionBatch(distributionPredictions);
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/scikitwrapper/ScikitLearnMultiTargetRegressionWrapper.java
|
package ai.libs.jaicore.ml.scikitwrapper;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.stream.Collectors;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance;
import org.api4.java.ai.ml.core.evaluation.IPrediction;
import org.api4.java.ai.ml.core.evaluation.IPredictionBatch;
import org.api4.java.ai.ml.core.exception.PredictionException;
import org.api4.java.ai.ml.core.exception.TrainingException;
import ai.libs.jaicore.ml.core.EScikitLearnProblemType;
import ai.libs.jaicore.ml.core.dataset.schema.attribute.NumericAttribute;
import ai.libs.jaicore.ml.regression.singlelabel.SingleTargetRegressionPrediction;
import ai.libs.jaicore.ml.regression.singlelabel.SingleTargetRegressionPredictionBatch;
/**
 * Wrapper executing a scikit-learn pipeline for regression with multiple target columns. The
 * target indices are forwarded to the python wrapper script via the command line builders.
 */
public class ScikitLearnMultiTargetRegressionWrapper<P extends IPrediction, B extends IPredictionBatch> extends AScikitLearnWrapper<P, B> {

	/**
	 * Creates a wrapper for a scikit-learn multi-target regression pipeline.
	 *
	 * @param pipeline The constructor call describing the scikit-learn pipeline.
	 * @param imports The import statements required by the pipeline definition.
	 * @param targetIndices The indices of the target columns in the dataset.
	 * @throws IOException Thrown if required files could not be handled.
	 * @throws InterruptedException Thrown if the executing thread was interrupted.
	 */
	public ScikitLearnMultiTargetRegressionWrapper(final String pipeline, final String imports, final int[] targetIndices) throws IOException, InterruptedException {
		super(EScikitLearnProblemType.REGRESSION, pipeline, imports);
		this.targetIndices = targetIndices;
	}

	/**
	 * Creates a wrapper for a sub-class-specific problem type.
	 * NOTE(review): this constructor does not initialize {@code targetIndices}; if they remain
	 * unset, {@link #doLabelsFitToProblemType} will fail with a NullPointerException — confirm
	 * that all sub-classes set them.
	 *
	 * @param problemType The concrete scikit-learn problem type.
	 * @param pipeline The constructor call describing the scikit-learn pipeline.
	 * @param imports The import statements required by the pipeline definition.
	 * @throws IOException Thrown if required files could not be handled.
	 * @throws InterruptedException Thrown if the executing thread was interrupted.
	 */
	protected ScikitLearnMultiTargetRegressionWrapper(final EScikitLearnProblemType problemType, final String pipeline, final String imports) throws IOException, InterruptedException {
		super(problemType, pipeline, imports);
	}

	@Override
	protected boolean doLabelsFitToProblemType(final ILabeledDataset<? extends ILabeledInstance> data) {
		// NOTE(review): the loop stops at length - 1, so the last entry of targetIndices is never
		// validated against the dataset; instead the dataset's label attribute is checked below.
		// This looks intentional (the last target presumably being the label attribute), but
		// confirm against how targetIndices is populated by callers.
		for (int i = 0; i < this.targetIndices.length - 1; i++) {
			if (!(data.getAttribute(this.targetIndices[i]) instanceof NumericAttribute)) {
				return false;
			}
		}
		return data.getLabelAttribute() instanceof NumericAttribute;
	}

	@Override
	protected ScikitLearnWrapperCommandBuilder constructCommandLineParametersForFitMode(final File modelFile, final File trainingDataFile) {
		// augment the default fit command with the target indices
		ScikitLearnWrapperCommandBuilder commandLineBuilder = super.constructCommandLineParametersForFitMode(modelFile, trainingDataFile);
		commandLineBuilder.withTargetIndices(this.targetIndices);
		return commandLineBuilder;
	}

	@Override
	protected ScikitLearnWrapperCommandBuilder constructCommandLineParametersForPredictMode(final File modelFile, final File testingDataFile, final File outputFile) {
		// augment the default predict command with the target indices
		ScikitLearnWrapperCommandBuilder commandLineBuilder = super.constructCommandLineParametersForPredictMode(modelFile, testingDataFile, outputFile);
		commandLineBuilder.withTargetIndices(this.targetIndices);
		return commandLineBuilder;
	}

	@Override
	protected ScikitLearnWrapperCommandBuilder constructCommandLineParametersForFitAndPredictMode(final File trainingDataFile, final File testingDataFile, final File testingOutputFile) {
		// augment the default fit-and-predict command with the target indices
		ScikitLearnWrapperCommandBuilder commandLineBuilder = super.constructCommandLineParametersForFitAndPredictMode(trainingDataFile, testingDataFile, testingOutputFile);
		commandLineBuilder.withTargetIndices(this.targetIndices);
		return commandLineBuilder;
	}

	/**
	 * Parses the python output file into a prediction batch. The (row x target) matrix of raw
	 * values is flattened into one {@link SingleTargetRegressionPrediction} per value.
	 *
	 * @param outputFile The file the python process wrote its predictions to.
	 * @return The parsed prediction batch.
	 * @throws PredictionException Thrown if the output file contains no predictions.
	 * @throws TrainingException Declared by the overridden contract.
	 */
	@SuppressWarnings("unchecked")
	@Override
	protected B handleOutput(final File outputFile) throws PredictionException, TrainingException {
		List<List<Double>> rawLastPredictionResults = this.getRawPredictionResults(outputFile);
		if (!rawLastPredictionResults.isEmpty()) {
			return (B) new SingleTargetRegressionPredictionBatch(rawLastPredictionResults.stream().flatMap(List::stream).map(x -> new SingleTargetRegressionPrediction((double) x)).collect(Collectors.toList()));
		}
		throw new PredictionException("Reading the output file lead to empty predictions.");
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/scikitwrapper/ScikitLearnRegressionWrapper.java
|
package ai.libs.jaicore.ml.scikitwrapper;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.stream.Collectors;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance;
import org.api4.java.ai.ml.core.evaluation.IPrediction;
import org.api4.java.ai.ml.core.evaluation.IPredictionBatch;
import org.api4.java.ai.ml.core.exception.PredictionException;
import org.api4.java.ai.ml.core.exception.TrainingException;
import ai.libs.jaicore.ml.core.EScikitLearnProblemType;
import ai.libs.jaicore.ml.core.dataset.schema.attribute.NumericAttribute;
import ai.libs.jaicore.ml.regression.singlelabel.SingleTargetRegressionPrediction;
import ai.libs.jaicore.ml.regression.singlelabel.SingleTargetRegressionPredictionBatch;
/**
 * Wrapper executing a scikit-learn pipeline for single-target regression. The python process
 * writes its predictions to an output file, which is parsed into a batch of
 * {@link SingleTargetRegressionPrediction} objects.
 */
public class ScikitLearnRegressionWrapper<P extends IPrediction, B extends IPredictionBatch> extends AScikitLearnWrapper<P, B> {

	/**
	 * Creates a wrapper for a scikit-learn regression pipeline.
	 *
	 * @param pipeline The constructor call describing the scikit-learn pipeline.
	 * @param imports The import statements required by the pipeline definition.
	 * @throws IOException Thrown if required files could not be handled.
	 * @throws InterruptedException Thrown if the executing thread was interrupted.
	 */
	public ScikitLearnRegressionWrapper(final String pipeline, final String imports) throws IOException, InterruptedException {
		super(EScikitLearnProblemType.REGRESSION, pipeline, imports);
	}

	/**
	 * Creates a wrapper for a sub-class-specific (regression-like) problem type.
	 *
	 * @param problemType The concrete scikit-learn problem type.
	 * @param pipeline The constructor call describing the scikit-learn pipeline.
	 * @param imports The import statements required by the pipeline definition.
	 * @throws IOException Thrown if required files could not be handled.
	 * @throws InterruptedException Thrown if the executing thread was interrupted.
	 */
	protected ScikitLearnRegressionWrapper(final EScikitLearnProblemType problemType, final String pipeline, final String imports) throws IOException, InterruptedException {
		super(problemType, pipeline, imports);
	}

	@Override
	protected boolean doLabelsFitToProblemType(final ILabeledDataset<? extends ILabeledInstance> data) {
		// regression requires a numeric target attribute
		return data.getLabelAttribute() instanceof NumericAttribute;
	}

	@SuppressWarnings("unchecked")
	@Override
	protected B handleOutput(final File outputFile) throws PredictionException, TrainingException {
		List<List<Double>> predictions = this.getRawPredictionResults(outputFile);
		if (predictions.isEmpty()) {
			throw new PredictionException("Reading the output file lead to empty predictions.");
		}
		// flatten all rows into one prediction per raw value
		List<SingleTargetRegressionPrediction> flattened = predictions.stream() //
				.flatMap(List::stream) //
				.map(value -> new SingleTargetRegressionPrediction((double) value)) //
				.collect(Collectors.toList());
		return (B) new SingleTargetRegressionPredictionBatch(flattened);
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/scikitwrapper/ScikitLearnTimeSeriesFeatureEngineeringWrapper.java
|
package ai.libs.jaicore.ml.scikitwrapper;
import java.io.File;
import java.io.IOException;
import java.util.Objects;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance;
import org.api4.java.ai.ml.core.evaluation.IPrediction;
import org.api4.java.ai.ml.core.evaluation.IPredictionBatch;
import org.api4.java.ai.ml.core.exception.PredictionException;
import org.api4.java.ai.ml.core.exception.TrainingException;
import ai.libs.jaicore.basic.FileUtil;
import ai.libs.jaicore.ml.core.EScikitLearnProblemType;
/**
 * Wrapper executing a scikit-learn pipeline for time series feature engineering. In contrast to
 * the prediction wrappers, the python process writes transformed datasets to ARFF files (see
 * {@link #getOutputFile(String)}), so {@link #handleOutput(File)} only verifies that the output
 * exists and does not produce a prediction batch.
 */
public class ScikitLearnTimeSeriesFeatureEngineeringWrapper<P extends IPrediction, B extends IPredictionBatch> extends AScikitLearnWrapper<P, B> {

	/**
	 * Creates a wrapper for a scikit-learn time series feature engineering pipeline.
	 *
	 * @param pipeline The constructor call describing the scikit-learn pipeline.
	 * @param imports The import statements required by the pipeline definition.
	 * @throws IOException Thrown if required files could not be handled.
	 * @throws InterruptedException Thrown if the executing thread was interrupted.
	 */
	public ScikitLearnTimeSeriesFeatureEngineeringWrapper(final String pipeline, final String imports) throws IOException, InterruptedException {
		super(EScikitLearnProblemType.TIME_SERIES_FEATURE_ENGINEERING, pipeline, imports);
	}

	@Override
	protected boolean doLabelsFitToProblemType(final ILabeledDataset<? extends ILabeledInstance> data) {
		// feature engineering imposes no constraint on the label attribute
		return true;
	}

	@Override
	public String getDataName(final ILabeledDataset<? extends ILabeledInstance> data) {
		// use the relation name directly instead of a hash-based name
		return data.getRelationName();
	}

	@Override
	public File getOutputFile(final String dataName) {
		// the output of feature engineering is a transformed dataset, hence an ARFF file
		return new File(this.scikitLearnWrapperConfig.getTempFolder(), this.configurationUID + "_" + dataName + ".arff");
	}

	@Override
	protected ScikitLearnWrapperCommandBuilder getCommandBuilder() {
		// use the specialized builder that also requires a fit output file
		ScikitLearnTimeSeriesFeatureEngineeringWrapperCommandBuilder commandBuilder = new ScikitLearnTimeSeriesFeatureEngineeringWrapperCommandBuilder(this.problemType.getScikitLearnCommandLineFlag(), this.getSKLearnScriptFile());
		return super.getCommandBuilder(commandBuilder);
	}

	/**
	 * Builds the fit command, additionally declaring the file the transformed training data is
	 * written to.
	 *
	 * @param modelFile The file the model is stored in.
	 * @param trainingDataFile The file containing the training data.
	 * @param outputFile The file the transformed training data is written to.
	 * @return The configured command builder.
	 */
	protected ScikitLearnWrapperCommandBuilder constructCommandLineParametersForFitMode(final File modelFile, final File trainingDataFile, final File outputFile) {
		return super.constructCommandLineParametersForFitMode(modelFile, trainingDataFile).withFitOutputFile(outputFile);
	}

	/**
	 * Builds the fit-and-predict command, additionally declaring the file the transformed
	 * training data is written to.
	 *
	 * @param trainingDataFile The file containing the training data.
	 * @param trainingOutputFile The file the transformed training data is written to.
	 * @param testingDataFile The file containing the testing data.
	 * @param testingOutputFile The file the transformed testing data is written to.
	 * @return The configured command builder.
	 */
	protected ScikitLearnWrapperCommandBuilder constructCommandLineParametersForFitAndPredictMode(final File trainingDataFile, final File trainingOutputFile, final File testingDataFile, final File testingOutputFile) {
		return super.constructCommandLineParametersForFitAndPredictMode(trainingDataFile, testingDataFile, testingOutputFile).withFitOutputFile(trainingOutputFile);
	}

	/**
	 * Checks that the python process produced the expected output file. Returns {@code null} by
	 * design: the actual result of feature engineering is the ARFF file itself, not a prediction
	 * batch.
	 *
	 * @param outputFile The expected output file.
	 * @return Always {@code null}.
	 * @throws TrainingException Thrown if the output file does not exist, i.e. python failed.
	 */
	@Override
	protected B handleOutput(final File outputFile) throws TrainingException {
		if (!outputFile.exists()) {
			// create an empty marker file so that downstream consumers find a file at the expected path
			FileUtil.touch(outputFile.getAbsolutePath());
			throw new TrainingException("Executing python failed.");
		}
		return null;
	}

	/**
	 * Checks both the fit and the predict output files (see {@link #handleOutput(File)}).
	 *
	 * @param fitOutputFile The expected output file of the fit step.
	 * @param predictOutputFile The expected output file of the predict step.
	 * @return Always {@code null}.
	 * @throws PredictionException Declared by the overridden contract.
	 * @throws TrainingException Thrown if one of the output files does not exist.
	 */
	protected B handleOutput(final File fitOutputFile, final File predictOutputFile) throws PredictionException, TrainingException {
		this.handleOutput(fitOutputFile);
		this.handleOutput(predictOutputFile);
		return null;
	}

	/**
	 * Command builder variant that additionally requires a fit output file in fit and
	 * fit-and-predict mode, since feature engineering always materializes transformed data.
	 */
	class ScikitLearnTimeSeriesFeatureEngineeringWrapperCommandBuilder extends ScikitLearnWrapperCommandBuilder {

		protected ScikitLearnTimeSeriesFeatureEngineeringWrapperCommandBuilder(final String problemTypeFlag, final File scriptFile) {
			super(problemTypeFlag, scriptFile);
		}

		@Override
		protected void checkRequirementsTrainMode() {
			Objects.requireNonNull(this.fitDataFile);
			Objects.requireNonNull(this.modelFile);
			Objects.requireNonNull(this.fitOutputFile);
		}

		@Override
		protected void checkRequirementsTrainTestMode() {
			Objects.requireNonNull(this.fitDataFile);
			Objects.requireNonNull(this.fitOutputFile);
			Objects.requireNonNull(this.predictDataFile);
			Objects.requireNonNull(this.predictOutputFile);
		}
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/scikitwrapper/ScikitLearnTimeSeriesRegressionWrapper.java
|
package ai.libs.jaicore.ml.scikitwrapper;
import java.io.IOException;
import org.api4.java.ai.ml.core.evaluation.IPrediction;
import org.api4.java.ai.ml.core.evaluation.IPredictionBatch;
import ai.libs.jaicore.ml.core.EScikitLearnProblemType;
/**
 * Wrapper executing a scikit-learn pipeline for time series regression. Behaves like
 * {@link ScikitLearnRegressionWrapper} but announces the time series regression problem type.
 */
public class ScikitLearnTimeSeriesRegressionWrapper<P extends IPrediction, B extends IPredictionBatch> extends ScikitLearnRegressionWrapper<P, B> {

	/**
	 * Creates a wrapper for a scikit-learn time series regression pipeline.
	 *
	 * @param pipeline The constructor call describing the scikit-learn pipeline.
	 * @param imports The import statements required by the pipeline definition.
	 * @throws IOException Thrown if required files could not be handled.
	 * @throws InterruptedException Thrown if the executing thread was interrupted.
	 */
	public ScikitLearnTimeSeriesRegressionWrapper(final String pipeline, final String imports) throws IOException, InterruptedException {
		super(EScikitLearnProblemType.TIME_SERIES_REGRESSION, pipeline, imports);
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/scikitwrapper/ScikitLearnWrapperCommandBuilder.java
|
package ai.libs.jaicore.ml.scikitwrapper;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.StringJoiner;
import org.api4.java.algorithm.Timeout;
import org.slf4j.Logger;
import ai.libs.jaicore.processes.EOperatingSystem;
import ai.libs.jaicore.processes.ProcessUtil;
import ai.libs.python.IPythonConfig;
import ai.libs.python.PythonUtil;
/**
 * Builder assembling the command line with which the generated scikit-learn wrapper python
 * script is invoked. The builder validates that all files required for the chosen execution mode
 * (fit, predict, or fit-and-predict) have been provided before producing the command array.
 */
public class ScikitLearnWrapperCommandBuilder {

	// optional; only used for informational output, see toCommandArray()
	private Logger logger;

	/** The execution modes understood by the python wrapper script. */
	private enum EWrapperExecutionMode {
		FIT("fit"), PREDICT("predict"), FIT_AND_PREDICT("fitAndPredict");

		private String name;

		private EWrapperExecutionMode(final String name) {
			this.name = name;
		}

		@Override
		public String toString() {
			return this.name;
		}
	}

	// command line flags understood by the python wrapper script
	private static final String PROBLEM_FLAG = "--problem";
	private static final String MODE_FLAG = "--mode";
	private static final String MODEL_FLAG = "--model";
	private static final String FIT_DATA_FLAG = "--fit";
	private static final String FIT_OUTPUT_FLAG = "--fitOutput";
	private static final String PREDICT_DATA_FLAG = "--predict";
	private static final String PREDICT_OUTPUT_FLAG = "--predictOutput";
	private static final String TARGETS_FLAG = "--targets";
	private static final String SEED_FLAG = "--seed";

	private IPythonConfig pythonConfiguration;
	private String problemTypeFlag;
	private File scriptFile;
	private EWrapperExecutionMode executionMode;
	protected String modelFile;
	protected String fitDataFile;
	protected String fitOutputFile;
	protected String predictDataFile;
	protected String predictOutputFile;
	private int[] targetIndices;
	private long seed;
	private Timeout timeout;
	protected List<String> additionalParameters;

	protected ScikitLearnWrapperCommandBuilder(final String problemTypeFlag, final File scriptFile) {
		this.problemTypeFlag = problemTypeFlag;
		this.scriptFile = scriptFile;
	}

	/** Sets the python configuration used to resolve the interpreter. */
	public ScikitLearnWrapperCommandBuilder withPythonConfig(final IPythonConfig pythonConfiguration) {
		this.pythonConfiguration = pythonConfiguration;
		return this;
	}

	/** Sets an (optional) logger for informational output while building the command. */
	public ScikitLearnWrapperCommandBuilder withLogger(final Logger logger) {
		this.logger = logger;
		return this;
	}

	/** Sets the python wrapper script to execute. */
	public ScikitLearnWrapperCommandBuilder withScriptFile(final File scriptFile) {
		this.scriptFile = scriptFile;
		return this;
	}

	private ScikitLearnWrapperCommandBuilder withMode(final EWrapperExecutionMode executionMode) {
		this.executionMode = executionMode;
		return this;
	}

	/** Configures the command for fit-only execution. */
	public ScikitLearnWrapperCommandBuilder withFitMode() {
		return this.withMode(EWrapperExecutionMode.FIT);
	}

	/** Configures the command for predict-only execution. */
	public ScikitLearnWrapperCommandBuilder withPredictMode() {
		return this.withMode(EWrapperExecutionMode.PREDICT);
	}

	/** Configures the command for combined fit-and-predict execution. */
	public ScikitLearnWrapperCommandBuilder withFitAndPredictMode() {
		return this.withMode(EWrapperExecutionMode.FIT_AND_PREDICT);
	}

	/** Sets the file the (pickled) model is read from or written to. */
	public ScikitLearnWrapperCommandBuilder withModelFile(final File modelFile) {
		this.modelFile = modelFile.getAbsolutePath();
		return this;
	}

	/**
	 * Sets the training data file; the file must already exist since python will read it.
	 *
	 * @throws IllegalArgumentException If the given file does not exist.
	 */
	public ScikitLearnWrapperCommandBuilder withFitDataFile(final File trainDataFile) {
		if (!trainDataFile.getAbsoluteFile().exists()) {
			throw new IllegalArgumentException("Data file does not exist: " + trainDataFile.getAbsolutePath());
		}
		this.fitDataFile = trainDataFile.getAbsolutePath();
		return this;
	}

	/** Sets the file the fit step writes its output to. */
	public ScikitLearnWrapperCommandBuilder withFitOutputFile(final File outputFile) {
		this.fitOutputFile = outputFile.getAbsolutePath();
		return this;
	}

	/** Sets the testing data file. */
	public ScikitLearnWrapperCommandBuilder withPredictDataFile(final File testDataFile) {
		this.predictDataFile = testDataFile.getAbsolutePath();
		return this;
	}

	/** Sets the file the predictions are written to. */
	public ScikitLearnWrapperCommandBuilder withPredictOutputFile(final File outputFile) {
		this.predictOutputFile = outputFile.getAbsolutePath();
		return this;
	}

	/** Sets the indices of the target columns (multi-target problems). */
	public ScikitLearnWrapperCommandBuilder withTargetIndices(final int... targetIndices) {
		this.targetIndices = targetIndices;
		return this;
	}

	/** Sets the seed forwarded to the python wrapper script. */
	public ScikitLearnWrapperCommandBuilder withSeed(final long seed) {
		this.seed = seed;
		return this;
	}

	/** Sets a timeout; on Linux the command is prefixed with the `timeout` utility. */
	public ScikitLearnWrapperCommandBuilder withTimeout(final Timeout timeout) {
		this.timeout = timeout;
		return this;
	}

	/** Appends arbitrary additional command line parameters. */
	public ScikitLearnWrapperCommandBuilder withAdditionalCommandLineParameters(final List<String> additionalCommandLineParameters) {
		this.additionalParameters = additionalCommandLineParameters;
		return this;
	}

	/**
	 * Validates that all parameters required for the configured execution mode are present.
	 *
	 * @throws IllegalArgumentException If the wrapper script file does not exist.
	 * @throws NullPointerException If a required parameter is missing.
	 */
	public void checkRequirements() {
		if (!this.scriptFile.exists()) {
			throw new IllegalArgumentException("The wrapped sklearn script " + this.scriptFile.getAbsolutePath() + " file does not exist");
		}
		Objects.requireNonNull(this.problemTypeFlag);
		Objects.requireNonNull(this.executionMode);
		switch (this.executionMode) {
		case FIT:
			this.checkRequirementsTrainMode();
			break;
		case PREDICT:
			this.checkRequirementsTestMode();
			break;
		case FIT_AND_PREDICT:
			this.checkRequirementsTrainTestMode();
			break;
		}
	}

	protected void checkRequirementsTrainMode() {
		Objects.requireNonNull(this.fitDataFile);
		Objects.requireNonNull(this.modelFile);
		Objects.requireNonNull(this.targetIndices);
	}

	protected void checkRequirementsTestMode() {
		Objects.requireNonNull(this.modelFile);
		Objects.requireNonNull(this.predictDataFile);
		Objects.requireNonNull(this.predictOutputFile);
		Objects.requireNonNull(this.targetIndices);
	}

	protected void checkRequirementsTrainTestMode() {
		Objects.requireNonNull(this.fitDataFile);
		Objects.requireNonNull(this.predictDataFile);
		Objects.requireNonNull(this.predictOutputFile);
		Objects.requireNonNull(this.targetIndices);
	}

	/**
	 * Assembles the full command array for spawning the python process.
	 *
	 * @return The command array, including the python interpreter invocation.
	 * @throws IllegalArgumentException If the wrapper script file does not exist.
	 * @throws NullPointerException If a parameter required for the execution mode is missing.
	 */
	public String[] toCommandArray() {
		this.checkRequirements();
		List<String> processParameters = new ArrayList<>();
		EOperatingSystem os = ProcessUtil.getOS();
		if (this.timeout != null && os == EOperatingSystem.LINUX) {
			if (this.logger != null) { // the logger is optional (only set via withLogger), so guard against an NPE here
				this.logger.info("Executing with timeout {}s", this.timeout.seconds());
			}
			processParameters.add("timeout");
			// leave a 2s margin so the python process can terminate before the hard timeout strikes
			processParameters.add(this.timeout.seconds() - 2 + "");
		}
		processParameters.add("-u"); // Force python to run stdout and stderr unbuffered.
		processParameters.add(this.scriptFile.getAbsolutePath());
		processParameters.addAll(Arrays.asList(PROBLEM_FLAG, this.problemTypeFlag));
		processParameters.addAll(Arrays.asList(MODE_FLAG, this.executionMode.toString()));
		if (this.modelFile != null) {
			processParameters.addAll(Arrays.asList(MODEL_FLAG, this.modelFile));
		}
		if (this.fitDataFile != null) {
			processParameters.addAll(Arrays.asList(FIT_DATA_FLAG, this.fitDataFile));
		}
		if (this.fitOutputFile != null) {
			processParameters.addAll(Arrays.asList(FIT_OUTPUT_FLAG, this.fitOutputFile));
		}
		if (this.predictDataFile != null) {
			processParameters.addAll(Arrays.asList(PREDICT_DATA_FLAG, this.predictDataFile));
		}
		if (this.predictOutputFile != null) {
			processParameters.addAll(Arrays.asList(PREDICT_OUTPUT_FLAG, this.predictOutputFile));
		}
		processParameters.addAll(Arrays.asList(SEED_FLAG, String.valueOf(this.seed)));
		if (this.targetIndices != null && this.targetIndices.length > 0) {
			// serialize the indices as e.g. "[1,2,3]" (Arrays.toString with whitespace removed)
			processParameters.addAll(Arrays.asList(TARGETS_FLAG, Arrays.toString(this.targetIndices).replaceAll("\\s+", "")));
		}
		if (this.additionalParameters != null) {
			processParameters.addAll(this.additionalParameters);
		}
		StringJoiner stringJoiner = new StringJoiner(" ");
		for (String parameter : processParameters) {
			stringJoiner.add(parameter);
		}
		return new PythonUtil(this.pythonConfiguration).getExecutableCommandArray(false, stringJoiner.toString());
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/scikitwrapper/ScikitLearnWrapperExecutionFailedException.java
|
package ai.libs.jaicore.ml.scikitwrapper;
import org.api4.java.algorithm.exceptions.AlgorithmException;
/**
 * Exception signaling that executing the scikit-learn wrapper (i.e. the spawned python process)
 * failed.
 */
public class ScikitLearnWrapperExecutionFailedException extends AlgorithmException {

	private static final long serialVersionUID = -3658570286117660941L;

	/**
	 * Creates a new {@link ScikitLearnWrapperExecutionFailedException} with the given parameters.
	 *
	 * @param message The message of this {@link Exception}.
	 * @param cause The underlying cause of this {@link Exception}.
	 */
	public ScikitLearnWrapperExecutionFailedException(final String message, final Throwable cause) {
		super(message, cause);
	}

	/**
	 * Creates a new {@link ScikitLearnWrapperExecutionFailedException} with the given parameters.
	 *
	 * @param message The message of this {@link Exception}.
	 */
	public ScikitLearnWrapperExecutionFailedException(final String message) {
		super(message);
	}
}
|
0
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/scikitwrapper
|
java-sources/ai/libs/jaicore-ml/0.2.7/ai/libs/jaicore/ml/scikitwrapper/simple/ASimpleScikitLearnWrapper.java
|
package ai.libs.jaicore.ml.scikitwrapper.simple;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.aeonbits.owner.ConfigFactory;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledDataset;
import org.api4.java.ai.ml.core.dataset.supervised.ILabeledInstance;
import org.api4.java.ai.ml.core.evaluation.IPrediction;
import org.api4.java.ai.ml.core.evaluation.IPredictionBatch;
import org.api4.java.ai.ml.core.exception.DatasetCreationException;
import org.api4.java.ai.ml.core.exception.PredictionException;
import org.api4.java.ai.ml.core.exception.TrainingException;
import org.api4.java.algorithm.Timeout;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ai.libs.jaicore.basic.ResourceUtil;
import ai.libs.jaicore.ml.core.dataset.serialization.ArffDatasetAdapter;
import ai.libs.jaicore.ml.core.learner.ASupervisedLearner;
import ai.libs.jaicore.ml.scikitwrapper.IScikitLearnWrapper;
import ai.libs.jaicore.ml.scikitwrapper.IScikitLearnWrapperConfig;
import ai.libs.jaicore.ml.scikitwrapper.ScikitLearnWrapperExecutionFailedException;
import ai.libs.python.IPythonConfig;
import ai.libs.python.PythonRequirementDefinition;
import ai.libs.python.PythonUtil;
/**
 * Base class for lightweight scikit-learn wrappers. A python script is rendered from a template,
 * the train/test data is dumped to ARFF files, and a python process is spawned that fits the
 * pipeline and writes its predictions to a JSON file. This simple variant does not support model
 * serialization, seeds, or timeouts (the respective setters are no-ops).
 *
 * @param <P> The type of a single prediction.
 * @param <B> The type of a batch of predictions.
 */
public abstract class ASimpleScikitLearnWrapper<P extends IPrediction, B extends IPredictionBatch> extends ASupervisedLearner<ILabeledInstance, ILabeledDataset<? extends ILabeledInstance>, P, B> implements IScikitLearnWrapper {
	// logging
	private Logger logger = LoggerFactory.getLogger(ASimpleScikitLearnWrapper.class);
	private static final String LOG_SERIALIZATION_NOT_IMPLEMENTED = "The simple scikit-learn classifier wrapper does not support model serialization.";
	// python requirements
	public static final int PYTHON_MINIMUM_REQUIRED_VERSION_REL = 3;
	public static final int PYTHON_MINIMUM_REQUIRED_VERSION_MAJ = 5;
	public static final int PYTHON_MINIMUM_REQUIRED_VERSION_MIN = 0;
	protected static final String[] PYTHON_REQUIRED_MODULES = { "arff", "numpy", "json", "pickle", "os", "sys", "warnings", "scipy", "sklearn", "pandas" };
	protected static final String[] PYTHON_OPTIONAL_MODULES = {};
	// null until the requirement check has been run once; the check is executed only once per JVM
	private static Boolean pythonRequirementsFulfilled = null;
	// configurables
	private static File tempDir = null;
	private String pathExecutableTemplate = "sklearn/sklearn_template_windows.twig.py";
	protected IScikitLearnWrapperConfig sklearnClassifierConfig = ConfigFactory.create(IScikitLearnWrapperConfig.class);
	protected IPythonConfig pythonC;
	private PythonUtil putil;
	// variables of the object
	protected final String problem;
	protected final String constructorCall;
	protected final String imports;
	// temporary files
	private File executable = null;
	private File outputFile = null;
	// temporary data; fit() only stores the dataset, the actual python call happens on predict
	protected ILabeledDataset<? extends ILabeledInstance> trainingData;

	/**
	 * Creates a wrapper using the default python configuration.
	 *
	 * @param constructorCall The constructor call describing the scikit-learn pipeline.
	 * @param imports The import statements required by the pipeline definition.
	 * @param problem The problem identifier passed to the python script via {@code --problem}.
	 * @throws IOException Thrown if the python configuration could not be handled.
	 * @throws InterruptedException Thrown if the executing thread was interrupted while checking the python requirements.
	 */
	protected ASimpleScikitLearnWrapper(final String constructorCall, final String imports, final String problem) throws IOException, InterruptedException {
		this(constructorCall, imports, problem, ConfigFactory.create(IPythonConfig.class));
	}

	/**
	 * Creates a wrapper using the given python configuration.
	 *
	 * @param constructorCall The constructor call describing the scikit-learn pipeline.
	 * @param imports The import statements required by the pipeline definition.
	 * @param problem The problem identifier passed to the python script via {@code --problem}.
	 * @param pythonConfig The python configuration to use.
	 * @throws IOException Thrown if the python configuration could not be handled.
	 * @throws InterruptedException Thrown if the executing thread was interrupted while checking the python requirements.
	 */
	protected ASimpleScikitLearnWrapper(final String constructorCall, final String imports, final String problem, final IPythonConfig pythonConfig) throws IOException, InterruptedException {
		this.constructorCall = constructorCall;
		this.imports = imports;
		this.problem = problem;
		this.setPythonConfig(pythonConfig);
	}

	/**
	 * Verifies (once per JVM) that the python installation satisfies the version and module
	 * requirements of the wrapper.
	 *
	 * @throws InterruptedException Thrown if the executing thread was interrupted during the check.
	 */
	private synchronized void ensurePythonRequirementsAreSatisfied() throws InterruptedException {
		if (pythonRequirementsFulfilled == null) {
			new PythonRequirementDefinition(PYTHON_MINIMUM_REQUIRED_VERSION_REL, PYTHON_MINIMUM_REQUIRED_VERSION_MAJ, PYTHON_MINIMUM_REQUIRED_VERSION_MIN, PYTHON_REQUIRED_MODULES, PYTHON_OPTIONAL_MODULES).check(this.pythonC);
			// only remember success after the check has passed; a failed check can be retried
			pythonRequirementsFulfilled = true;
		}
	}

	/**
	 * Stores the training data. No python process is started here; the pipeline is fit lazily
	 * together with the prediction in {@link #executePipeline(ILabeledDataset)}.
	 */
	@Override
	public void fit(final ILabeledDataset<? extends ILabeledInstance> dTrain) throws TrainingException, InterruptedException {
		this.trainingData = dTrain;
	}

	@Override
	public String getLoggerName() {
		return this.logger.getName();
	}

	@Override
	public void setLoggerName(final String name) {
		this.logger = LoggerFactory.getLogger(name);
	}

	/**
	 * Serializes the dataset to an ARFF file unless a dump with the same name already exists, in
	 * which case the existing file is reused.
	 *
	 * @param dataset The dataset to dump.
	 * @param dataFileName The name under which the dump is stored.
	 * @return The (possibly pre-existing) data file.
	 * @throws ScikitLearnWrapperExecutionFailedException Thrown if the dataset could not be serialized.
	 * @throws IOException Thrown if the temp directory could not be created.
	 */
	private synchronized File getOrWriteDataFile(final ILabeledDataset<? extends ILabeledInstance> dataset, final String dataFileName) throws ScikitLearnWrapperExecutionFailedException, IOException {
		File dataFile = this.getDatasetFile(dataFileName);
		if (dataFile.exists()) {
			this.logger.debug("Reusing dataset: {}", dataFileName);
			return dataFile;
		}
		try {
			if (this.sklearnClassifierConfig.getDeleteFileOnExit()) {
				dataFile.deleteOnExit();
			}
			this.logger.debug("Serializing {}x{} dataset to {}", dataset.size(), dataset.getNumAttributes(), dataFileName);
			new ArffDatasetAdapter().serializeDataset(dataFile, dataset);
		} catch (IOException e1) {
			throw new ScikitLearnWrapperExecutionFailedException("Could not dump data file for prediction", e1);
		}
		this.logger.debug("Serialization completed.");
		return dataFile;
	}

	/**
	 * @return The ARFF file in the shared temp directory for the given dataset name.
	 */
	private synchronized File getDatasetFile(final String datasetName) throws IOException {
		File datasetFile = new File(getTempDir(), datasetName + ".arff");
		if (this.sklearnClassifierConfig.getDeleteFileOnExit()) {
			datasetFile.deleteOnExit();
		}
		return datasetFile;
	}

	/**
	 * @return The lazily created, JVM-wide temp directory for dataset dumps.
	 */
	private static synchronized File getTempDir() throws IOException {
		if (tempDir == null) {
			tempDir = Files.createTempDirectory("ailibs-dumps").toFile();
			tempDir.deleteOnExit();
		}
		return tempDir;
	}

	/**
	 * Renders the python script from the template, dumps training and test data, and spawns the
	 * python process that fits the pipeline and predicts on the test data.
	 *
	 * @param dTest The test dataset to predict on.
	 * @return The file the python process wrote its predictions to.
	 * @throws IOException Thrown if temporary files could not be created.
	 * @throws InterruptedException Thrown if the executing thread was interrupted.
	 * @throws ScikitLearnWrapperExecutionFailedException Thrown if the python process did not terminate cleanly.
	 */
	protected File executePipeline(final ILabeledDataset<? extends ILabeledInstance> dTest) throws IOException, InterruptedException, ScikitLearnWrapperExecutionFailedException {
		this.executable = Files.createTempFile("sklearn-classifier-", ".py").toFile();
		this.executable.deleteOnExit();
		String template = ResourceUtil.readResourceFileToString(this.pathExecutableTemplate);
		template = template.replace("{{pipeline}}", this.constructorCall);
		template = template.replace("{{import}}", this.imports);
		// write with a fixed encoding (Files.newBufferedWriter defaults to UTF-8); FileWriter
		// would use the platform default charset and could corrupt non-ASCII pipeline definitions
		try (BufferedWriter bw = Files.newBufferedWriter(this.executable.toPath())) {
			bw.write(template);
		}
		this.outputFile = Files.createTempFile("sklearn-predictions", ".json").toFile();
		this.outputFile.deleteOnExit();
		File fitFile = this.getOrWriteDataFile(this.trainingData, this.getDataName(this.trainingData));
		File predictFile = this.getOrWriteDataFile(dTest, this.getDataName(dTest));
		List<String> command = new ArrayList<>();
		command.add(this.executable.getCanonicalPath());
		command.add("--fit");
		command.add(fitFile.getCanonicalPath());
		command.add("--predict");
		command.add(predictFile.getCanonicalPath());
		command.add("--problem");
		command.add(this.problem);
		command.add("--predictOutput");
		command.add(this.outputFile.getCanonicalPath());
		int exitCode = this.putil.executeScriptFile(command);
		if (exitCode != 0) {
			throw new ScikitLearnWrapperExecutionFailedException("Spawned python process has not terminated cleanly.");
		}
		return this.outputFile;
	}

	/** Not supported by this simple wrapper; logs and does nothing. */
	@Override
	public void setModelPath(final String modelPath) throws IOException {
		this.logger.debug(LOG_SERIALIZATION_NOT_IMPLEMENTED);
	}

	/** Not supported by this simple wrapper; always returns {@code null}. */
	@Override
	public File getModelPath() {
		this.logger.debug(LOG_SERIALIZATION_NOT_IMPLEMENTED);
		return null;
	}

	/** Not supported by this simple wrapper; always returns {@code null}. */
	@Override
	public File getModelFile() {
		this.logger.debug(LOG_SERIALIZATION_NOT_IMPLEMENTED);
		return null;
	}

	/** Not supported by this simple wrapper; logs and does nothing. */
	@Override
	public void setTargetIndices(final int... targetIndices) {
		this.logger.debug("The simple scikit-learn classifier wrapper does not support multiple targets.");
	}

	@Override
	public String toString() {
		return this.constructorCall;
	}

	/** Not supported by this simple wrapper; logs and does nothing. */
	@Override
	public void setSeed(final long seed) {
		this.logger.debug("The simple scikit-learn classifier wrapper does not support setting a seed.");
	}

	/** Not supported by this simple wrapper; logs and does nothing. */
	@Override
	public void setTimeout(final Timeout timeout) {
		this.logger.debug("The simple scikit-learn classifier wrapper does not support setting a timeout.");
	}

	/** Not supported by this simple wrapper; logs and does nothing. */
	@Override
	public void fit(final String trainingDataName) throws TrainingException, InterruptedException {
		this.logger.debug("The simple scikit-learn classifier wrapper does not support fitting providing a path only.");
	}

	/**
	 * Not meaningfully supported; logs and returns the last prediction output file regardless of
	 * the given name.
	 */
	@Override
	public File getOutputFile(final String dataName) {
		this.logger.debug("The simple scikit-learn classifier wrapper does not support retrieving the output file.");
		return this.outputFile;
	}

	@Override
	public void setPythonTemplate(final String pythonTemplatePath) throws IOException {
		this.pathExecutableTemplate = pythonTemplatePath;
	}

	/**
	 * Sets the python configuration and (on first use per JVM) validates the python requirements.
	 */
	@Override
	public void setPythonConfig(final IPythonConfig pythonConfig) throws IOException, InterruptedException {
		this.pythonC = pythonConfig;
		this.putil = new PythonUtil(pythonConfig);
		this.ensurePythonRequirementsAreSatisfied();
	}

	@Override
	public void setScikitLearnWrapperConfig(final IScikitLearnWrapperConfig scikitLearnWrapperConfig) {
		this.sklearnClassifierConfig = scikitLearnWrapperConfig;
	}

	@Override
	public File getSKLearnScriptFile() {
		return this.executable;
	}

	/**
	 * Derives a name for the dataset from its hash code; a sign prefix ("1" for negative, "0"
	 * otherwise) keeps names of distinct hash codes distinct and file-system safe.
	 */
	@Override
	public String getDataName(final ILabeledDataset<? extends ILabeledInstance> data) {
		String hash = "" + data.hashCode();
		hash = hash.startsWith("-") ? hash.replace("-", "1") : "0" + hash;
		return hash;
	}

	/**
	 * Predicts on an array of instances by collecting them into an empty copy of the training
	 * dataset's schema and delegating to the dataset-based predict.
	 */
	@SuppressWarnings("unchecked")
	@Override
	public B predict(final ILabeledInstance[] dTest) throws PredictionException, InterruptedException {
		ILabeledDataset<ILabeledInstance> dataset;
		try {
			dataset = (ILabeledDataset<ILabeledInstance>) this.trainingData.createEmptyCopy();
		} catch (InterruptedException e) {
			throw e;
		} catch (DatasetCreationException e) {
			throw new PredictionException("Could not create empty test dataset copy.", e);
		}
		Arrays.stream(dTest).forEach(dataset::add);
		return this.predict(dataset);
	}

	/**
	 * Predicts on a single instance by wrapping it into a one-element dataset and returning the
	 * first (and only) entry of the resulting batch.
	 */
	@SuppressWarnings("unchecked")
	@Override
	public P predict(final ILabeledInstance xTest) throws PredictionException, InterruptedException {
		try {
			ILabeledDataset<ILabeledInstance> dTest = (ILabeledDataset<ILabeledInstance>) this.trainingData.createEmptyCopy();
			dTest.add(xTest);
			return (P) this.predict(dTest).get(0);
		} catch (InterruptedException e) {
			throw e;
		} catch (DatasetCreationException e) {
			throw new PredictionException("Could not predict due to a DatasetCreationException", e);
		}
	}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.