| index (int64) | repo_id (string) | file_path (string) | content (string) |
|---|---|---|---|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/model/CategoricalParameterDomain.java
|
package ai.libs.hasco.model;
import java.util.Arrays;
import java.util.Collection;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
public class CategoricalParameterDomain implements IParameterDomain {
private final String[] values;
@SuppressWarnings("unused")
private CategoricalParameterDomain() {
// for serialization
this.values = null;
}
@JsonCreator
public CategoricalParameterDomain(@JsonProperty("values") final String[] values) {
super();
this.values = values;
}
public CategoricalParameterDomain(@JsonProperty("values") final Collection<String> values) {
this(values.toArray(new String[] {}));
}
public String[] getValues() {
return this.values;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + Arrays.hashCode(this.values);
return result;
}
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (this.getClass() != obj.getClass()) {
return false;
}
CategoricalParameterDomain other = (CategoricalParameterDomain) obj;
return Arrays.equals(this.values, other.values);
}
@Override
public boolean contains(final Object item) {
if (item == null) {
throw new IllegalArgumentException("Cannot request membership of NULL in a categorical parameter domain.");
}
String itemAsString = item.toString();
for (int i = 0; i < this.values.length; i++) {
if (this.values[i].equals(itemAsString)) {
return true;
}
}
return false;
}
@Override
public boolean subsumes(final IParameterDomain otherDomain) {
if (!(otherDomain instanceof CategoricalParameterDomain)) {
return false;
}
CategoricalParameterDomain otherCategoricalDomain = (CategoricalParameterDomain) otherDomain;
return Arrays.asList(this.values).containsAll(Arrays.asList(otherCategoricalDomain.getValues()));
}
@Override
public String toString() {
return "CategoricalParameterDomain [values=" + Arrays.toString(this.values) + "]";
}
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/model/Component.java
|
package ai.libs.hasco.model;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import ai.libs.jaicore.basic.sets.PartialOrderedSet;
import ai.libs.jaicore.logging.ToJSONStringUtil;
/**
* A <code>Component</code> is described by a name, a collection of provided interfaces, a list of required interfaces, a set of parameters, and a list of dependencies. It can be used to describe
* any kind of component and to model complex multi-component systems. More specifically, <code>Component</code>s are used to model the search space of HASCO. By recursively resolving required
* interfaces until there are no open choices left, HASCO can automatically transform a component description into an HTN planning problem in order to optimize a component setup for a specific
* task.
*
* @author fmohr, wever
*/
@JsonPropertyOrder({ "name", "parameters", "dependencies", "providedInterfaces", "requiredInterfaces" })
public class Component {
/* Logger */
private static final Logger L = LoggerFactory.getLogger(Component.class);
/* Description of the component. */
private final String name;
private Collection<String> providedInterfaces = new ArrayList<>();
private Map<String, String> requiredInterfaces = new LinkedHashMap<>();
private PartialOrderedSet<Parameter> parameters = new PartialOrderedSet<>();
private Collection<Dependency> dependencies = new ArrayList<>();
/**
* Constructor creating an empty <code>Component</code> with a specific name.
*
* @param name
* The name of the <code>Component</code>.
*/
public Component(final String name) {
super();
this.name = name;
this.getProvidedInterfaces().add(this.name);
}
/**
* Constructor for a component giving the provided and required interfaces, the collection of parameters and a list of dependencies.
*
* @param name
* The name of the <code>Component</code>.
* @param providedInterfaces
* The collection of provided interfaces.
* @param requiredInterfaces
* The list of required interfaces.
* @param parameters
* Parameters of the <code>Component</code>.
* @param dependencies
* A list of dependencies to constrain the values of parameters (may be empty).
*/
@JsonCreator
public Component(@JsonProperty("name") final String name, @JsonProperty("providedInterfaces") final Collection<String> providedInterfaces, @JsonProperty("requiredInterfaces") final Map<String, String> requiredInterfaces,
@JsonProperty("parameters") final PartialOrderedSet<Parameter> parameters, @JsonProperty("dependencies") final Collection<Dependency> dependencies) {
this(name);
this.providedInterfaces = providedInterfaces;
this.requiredInterfaces = new LinkedHashMap<>(requiredInterfaces);
this.parameters = parameters;
this.dependencies = new ArrayList<>(dependencies);
}
/**
* Constructor for a component giving the provided and required interfaces, the collection of parameters and a list of dependencies.
*
* @param name
* The name of the <code>Component</code>.
* @param providedInterfaces
* The collection of provided interfaces.
* @param requiredInterfaces
* The list of required interfaces.
* @param parameters
* Parameters of the <code>Component</code>.
* @param dependencies
* A list of dependencies to constrain the values of parameters (may be empty).
*/
public Component(final String name, final Collection<String> providedInterfaces, final List<Map<String, String>> requiredInterfaces, final PartialOrderedSet<Parameter> parameters, final List<Dependency> dependencies) {
this(name);
this.providedInterfaces = providedInterfaces;
this.requiredInterfaces = new LinkedHashMap<>();
requiredInterfaces.stream().forEach(this.requiredInterfaces::putAll);
this.parameters = parameters;
this.dependencies = dependencies;
}
/**
* @return The name of the Component.
*/
public String getName() {
return this.name;
}
/**
* @return The map of required interfaces.
*/
public Map<String, String> getRequiredInterfaces() {
return this.requiredInterfaces;
}
/**
* @return The collection of provided interfaces.
*/
public Collection<String> getProvidedInterfaces() {
return this.providedInterfaces;
}
/**
* @return The set of parameters of this Component.
*/
public PartialOrderedSet<Parameter> getParameters() {
return this.parameters;
}
/**
* Returns the parameter for a given name.
*
* @param paramName
* The name of the parameter to be returned.
* @return The parameter for the given name.
*/
public Parameter getParameterWithName(final String paramName) {
Optional<Parameter> param = this.parameters.stream().filter(p -> p.getName().equals(paramName)).findFirst();
if (!param.isPresent()) {
throw new IllegalArgumentException("Component " + this.name + " has no parameter with name \"" + paramName + "\". Available parameters: " + this.parameters);
}
return param.get();
}
/**
* @return The collection of dependencies on the parameters of this <code>Component</code>.
*/
public Collection<Dependency> getDependencies() {
return this.dependencies;
}
/**
* Adds another provided interface to the collection of provided interfaces.
*
* @param interfaceName
* The interface to be added to the provided interfaces.
*/
public boolean addProvidedInterface(final String interfaceName) {
if (!this.providedInterfaces.contains(interfaceName)) {
return this.providedInterfaces.add(interfaceName);
} else {
return false;
}
}
/**
* Adds an additional required interface with an ID (local identifier) and an interface name (provided interface of another Component) to the required interfaces of this Component.
*
* @param interfaceID
* The local identifier to reference the specific required interface.
* @param interfaceName
* The provided interface of another component.
*/
public void addRequiredInterface(final String interfaceID, final String interfaceName) {
this.requiredInterfaces.put(interfaceID, interfaceName);
}
/**
* Adds a parameter to the set of parameters. If a parameter with the same name already exists, an IllegalArgumentException is thrown.
*
* @param param
* The parameter to be added.
*/
public void addParameter(final Parameter param) {
if (this.parameters.stream().anyMatch(p -> p.getName().equals(param.getName()))) {
throw new IllegalArgumentException("Component " + this.name + " already has a parameter with name " + param.getName());
}
this.parameters.add(param);
}
/**
* Adds a dependency constraint to the dependencies of this Component.
*
* @param dependency
* The dependency to be added.
*/
public void addDependency(final Dependency dependency) {
/*
* check whether this dependency is coherent with the current partial order on
* the parameters
*/
Collection<Parameter> paramsInPremise = new HashSet<>();
dependency.getPremise().forEach(c -> c.forEach(i -> paramsInPremise.add(i.getX())));
Collection<Parameter> paramsInConclusion = new HashSet<>();
dependency.getConclusion().forEach(i -> paramsInConclusion.add(i.getX()));
for (Parameter before : paramsInPremise) {
for (Parameter after : paramsInConclusion) {
this.parameters.requireABeforeB(before, after);
}
}
/* add the dependency to the set of dependencies */
this.dependencies.add(dependency);
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((this.dependencies == null) ? 0 : this.dependencies.hashCode());
result = prime * result + ((this.name == null) ? 0 : this.name.hashCode());
result = prime * result + ((this.parameters == null) ? 0 : this.parameters.hashCode());
result = prime * result + ((this.providedInterfaces == null) ? 0 : this.providedInterfaces.hashCode());
result = prime * result + ((this.requiredInterfaces == null) ? 0 : this.requiredInterfaces.hashCode());
return result;
}
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (this.getClass() != obj.getClass()) {
return false;
}
Component other = (Component) obj;
if (this.dependencies == null) {
if (other.dependencies != null) {
return false;
}
} else if (!this.dependencies.equals(other.dependencies)) {
return false;
}
if (this.name == null) {
if (other.name != null) {
return false;
}
} else if (!this.name.equals(other.name)) {
return false;
}
if (this.parameters == null) {
if (other.parameters != null) {
return false;
}
} else if (!this.parameters.equals(other.parameters)) {
return false;
}
if (this.providedInterfaces == null) {
if (other.providedInterfaces != null) {
return false;
}
} else if (!this.providedInterfaces.equals(other.providedInterfaces)) {
return false;
}
if (this.requiredInterfaces == null) {
if (other.requiredInterfaces != null) {
return false;
}
} else if (!this.requiredInterfaces.equals(other.requiredInterfaces)) {
return false;
}
return true;
}
@Override
public String toString() {
try {
return new ObjectMapper().writeValueAsString(this);
} catch (JsonProcessingException e) {
L.warn("Could not directly serialize Component to JSON: ", e);
}
Map<String, Object> fields = new HashMap<>();
fields.put("name", this.name);
fields.put("providedInterfaces", this.providedInterfaces);
fields.put("requiredInterfaces", this.requiredInterfaces);
fields.put("parameters", this.parameters);
return ToJSONStringUtil.toJSONString(this.getClass().getSimpleName(), fields);
}
}
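/*
 * Illustrative usage sketch (not part of the original library source): building a small component
 * description as outlined in the class comment above. The interface names "Classifier" and
 * "BaseLearner" as well as the parameter names are hypothetical placeholders, not identifiers
 * shipped with HASCO.
 */
class ComponentUsageExample {
    public static void main(final String[] args) {
        Component ensemble = new Component("MyEnsemble");
        ensemble.addProvidedInterface("Classifier");
        // required interface: local id -> interface that must be provided by some other component
        ensemble.addRequiredInterface("baseLearner", "BaseLearner");
        // one integer-valued and one categorical hyperparameter, each with a default value
        ensemble.addParameter(new Parameter("numIterations", new NumericParameterDomain(true, 1, 100), 10));
        ensemble.addParameter(new Parameter("votingScheme", new CategoricalParameterDomain(new String[] { "majority", "weighted" }), "majority"));
        System.out.println(ensemble.getName() + " provides " + ensemble.getProvidedInterfaces() + " and requires " + ensemble.getRequiredInterfaces());
    }
}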
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/model/ComponentInstance.java
|
package ai.libs.hasco.model;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import ai.libs.jaicore.basic.sets.Pair;
import ai.libs.jaicore.basic.sets.SetUtil;
import ai.libs.jaicore.logging.ToJSONStringUtil;
/**
* For a given <code>Component</code>, a <code>ComponentInstance</code> defines all parameter values and recursively resolves the required interfaces, thus providing a grounding of the respective
* <code>Component</code>.
*
* @author fmohr, mwever
*
*/
@JsonPropertyOrder(alphabetic = true)
public class ComponentInstance {
/* The component which serves as a kind of "type". */
private final Component component;
/* The grounding of the component including parameter values and recursively resolved required interfaces. */
private final Map<String, String> parameterValues;
private final Map<String, ComponentInstance> satisfactionOfRequiredInterfaces;
@SuppressWarnings("unused")
private ComponentInstance() {
// for serialization purposes
this.component = null;
this.parameterValues = null;
this.satisfactionOfRequiredInterfaces = null;
}
public ComponentInstance(final ComponentInstance other) {
this.component = other.component;
this.parameterValues = new HashMap<>(other.parameterValues);
this.satisfactionOfRequiredInterfaces = new HashMap<>();
other.satisfactionOfRequiredInterfaces.entrySet().forEach(x -> this.satisfactionOfRequiredInterfaces.put(x.getKey(), new ComponentInstance(x.getValue())));
}
/**
* Constructor for creating a <code>ComponentInstance</code> for a particular <code>Component</code>.
*
* @param component
* The component that is grounded.
* @param parameterValues
* A map containing the parameter values of this grounding.
* @param satisfactionOfRequiredInterfaces
* The refinement of the required interfaces.
*/
public ComponentInstance(@JsonProperty("component") final Component component, @JsonProperty("parameterValues") final Map<String, String> parameterValues,
@JsonProperty("satisfactionOfRequiredInterfaces") final Map<String, ComponentInstance> satisfactionOfRequiredInterfaces) {
super();
this.component = component;
this.parameterValues = parameterValues;
this.satisfactionOfRequiredInterfaces = satisfactionOfRequiredInterfaces;
}
/**
* @return The <code>Component</code> to this <code>ComponentInstance</code>.
*/
public Component getComponent() {
return this.component;
}
/**
* @return The parameters and how their values were set.
*/
public Map<String, String> getParameterValues() {
return this.parameterValues;
}
/**
* @return The set of parameters of which the values have been set explicitly.
*/
public Collection<Parameter> getParametersThatHaveBeenSetExplicitly() {
if (this.parameterValues == null) {
return new ArrayList<>();
}
return this.getComponent().getParameters().stream().filter(p -> this.parameterValues.containsKey(p.getName())).collect(Collectors.toList());
}
/**
* @return The set of parameters of which the values have not been set explicitly.
*/
public Collection<Parameter> getParametersThatHaveNotBeenSetExplicitly() {
return SetUtil.difference(this.component.getParameters(), this.getParametersThatHaveBeenSetExplicitly());
}
/**
* @param param
* The parameter for which the value shall be returned.
* @return The value of the parameter.
*/
public String getParameterValue(final Parameter param) {
return this.getParameterValue(param.getName());
}
/**
* @param paramName
* The name of the parameter for which the value is requested.
* @return The value of the parameter with the given name.
*/
public String getParameterValue(final String paramName) {
return this.parameterValues.get(paramName);
}
/**
* @return A mapping of interface IDs to the component instances chosen to satisfy them.
*/
public Map<String, ComponentInstance> getSatisfactionOfRequiredInterfaces() {
return this.satisfactionOfRequiredInterfaces;
}
/**
* @return A collection of all components contained (recursively) in this <code>ComponentInstance</code>.
*/
public Collection<Component> getContainedComponents() {
Collection<Component> components = new HashSet<>();
components.add(this.getComponent());
for (ComponentInstance ci : this.satisfactionOfRequiredInterfaces.values()) {
components.addAll(ci.getContainedComponents());
}
return components;
}
/**
* This method checks whether a given list of refinement paths conforms to the constraints for parameter refinements.
*
* @param paths
* A list of paths of refinements to be checked.
* @return Returns true if everything is alright and false if there is an issue with the given paths.
*/
public boolean matchesPathRestrictions(final Collection<List<Pair<String, String>>> paths) {
for (List<Pair<String, String>> path : paths) {
if (!this.matchesPathRestriction(path)) {
return false;
}
}
return true;
}
/**
* This method checks whether a path of refinements conforms to the constraints for parameter refinements.
*
* @param path
* A path of refinements to be checked.
* @return Returns true if everything is alright and false if there is an issue with the given path.
*/
public boolean matchesPathRestriction(final List<Pair<String, String>> path) {
if (path.isEmpty()) {
return true;
}
/* if the first entry is null, we interpret it as a filter on this component itself */
int i = 0;
if (path.get(0).getX() == null) {
String requiredComponent = path.get(0).getY();
if (!requiredComponent.equals("*") && !this.component.getName().equals(requiredComponent)) {
return false;
}
i = 1;
}
/* now go over the rest of the path and check every entry on conformity */
ComponentInstance current = this;
int n = path.size();
for (; i < n; i++) {
Pair<String, String> selection = path.get(i);
if (!current.getComponent().getRequiredInterfaces().containsKey(selection.getX())) {
throw new IllegalArgumentException("Invalid path restriction " + path + ": " + selection.getX() + " is not a required interface of " + current.getComponent().getName());
}
ComponentInstance instanceChosenForRequiredInterface = current.getSatisfactionOfRequiredInterfaces().get(selection.getX());
if (!selection.getY().equals("*") && !instanceChosenForRequiredInterface.getComponent().getName().equals(selection.getY())) {
return false;
}
current = instanceChosenForRequiredInterface;
}
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((this.component == null) ? 0 : this.component.hashCode());
result = prime * result + ((this.parameterValues == null) ? 0 : this.parameterValues.hashCode());
result = prime * result + ((this.satisfactionOfRequiredInterfaces == null) ? 0 : this.satisfactionOfRequiredInterfaces.hashCode());
return result;
}
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (this.getClass() != obj.getClass()) {
return false;
}
ComponentInstance other = (ComponentInstance) obj;
if (this.component == null) {
if (other.component != null) {
return false;
}
} else if (!this.component.equals(other.component)) {
return false;
}
if (this.parameterValues == null) {
if (other.parameterValues != null) {
return false;
}
} else if (!this.parameterValues.equals(other.parameterValues)) {
return false;
}
if (this.satisfactionOfRequiredInterfaces == null) {
if (other.satisfactionOfRequiredInterfaces != null) {
return false;
}
} else if (!this.satisfactionOfRequiredInterfaces.equals(other.satisfactionOfRequiredInterfaces)) {
return false;
}
return true;
}
public String toComponentNameString() {
StringBuilder sb = new StringBuilder();
sb.append(this.getComponent().getName());
if (!this.satisfactionOfRequiredInterfaces.isEmpty()) {
sb.append(this.satisfactionOfRequiredInterfaces.entrySet().stream().map(x -> x.getValue().toComponentNameString()).collect(Collectors.toList()).toString());
}
return sb.toString();
}
@JsonIgnore
@Override
public String toString() {
Map<String, Object> fields = new HashMap<>();
fields.put("component", this.component);
fields.put("parameterValues", this.parameterValues);
fields.put("satisfactionOfRequiredInterfaces", this.satisfactionOfRequiredInterfaces);
return ToJSONStringUtil.toJSONString(this.getClass().getSimpleName(), fields);
}
/**
* Returns the description of a <code>ComponentInstance</code> as a pretty print with indentation.
*
* @return A string representing this object in JSON format.
* @throws IOException
* An IOException is thrown if the object cannot be serialized to a String.
*/
@JsonIgnore
public String getPrettyPrint() throws IOException {
return new ObjectMapper().enable(SerializationFeature.INDENT_OUTPUT).writeValueAsString(this);
}
public String getNestedComponentDescription() {
StringBuilder sb = new StringBuilder();
sb.append(this.getComponent().getName());
this.satisfactionOfRequiredInterfaces.values().stream().map(x -> " - " + x.getNestedComponentDescription()).forEach(sb::append);
return sb.toString();
}
}
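/*
 * Illustrative usage sketch (not part of the original library source): grounding a component by
 * fixing parameter values and resolving its required interface, as described in the class comment
 * above. All component, interface, and parameter names are hypothetical placeholders.
 */
class ComponentInstanceUsageExample {
    public static void main(final String[] args) {
        Component base = new Component("MyBaseLearner");
        base.addProvidedInterface("BaseLearner");
        Component ensemble = new Component("MyEnsemble");
        ensemble.addRequiredInterface("baseLearner", "BaseLearner");
        ensemble.addParameter(new Parameter("numIterations", new NumericParameterDomain(true, 1, 100), 10));

        /* ground the base learner (no parameters, no required interfaces) */
        ComponentInstance baseInstance = new ComponentInstance(base, new HashMap<>(), new HashMap<>());

        /* ground the ensemble and plug the base learner into its required interface */
        Map<String, String> paramValues = new HashMap<>();
        paramValues.put("numIterations", "25");
        Map<String, ComponentInstance> satisfaction = new HashMap<>();
        satisfaction.put("baseLearner", baseInstance);
        ComponentInstance ensembleInstance = new ComponentInstance(ensemble, paramValues, satisfaction);

        System.out.println(ensembleInstance.getNestedComponentDescription()); // MyEnsemble - MyBaseLearner
        System.out.println(ensembleInstance.getParameterValue("numIterations")); // 25
    }
}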
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/model/ComponentInstanceUtil.java
|
package ai.libs.hasco.model;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import ai.libs.hasco.core.Util;
/**
* The ComponentInstanceUtil provides some utilities to deal with component instances.
* For instance, it may be used to check whether a ComponentInstance conforms to the dependencies
* defined in the respective Component.
*
* @author wever
*/
public class ComponentInstanceUtil {
private ComponentInstanceUtil() {
/* Private constructor to prevent anyone from instantiating this Util class by accident. */
}
/**
* Checks whether a component instance adheres to the inter-parameter dependencies defined in the component.
* @param ci The component instance to be verified.
* @return Returns true iff all dependency conditions hold.
*/
public static boolean isValidComponentInstantiation(final ComponentInstance ci) {
Map<Parameter, IParameterDomain> refinedDomainMap = new HashMap<>();
for (Parameter param : ci.getComponent().getParameters()) {
if (param.getDefaultDomain() instanceof NumericParameterDomain) {
double parameterValue = Double.parseDouble(ci.getParameterValue(param));
refinedDomainMap.put(param, new NumericParameterDomain(((NumericParameterDomain) param.getDefaultDomain()).isInteger(), parameterValue, parameterValue));
} else if (param.getDefaultDomain() instanceof CategoricalParameterDomain) {
refinedDomainMap.put(param, new CategoricalParameterDomain(Arrays.asList(ci.getParameterValue(param))));
}
}
for (Dependency dependency : ci.getComponent().getDependencies()) {
if (Util.isDependencyPremiseSatisfied(dependency, refinedDomainMap) && !Util.isDependencyConditionSatisfied(dependency.getConclusion(), refinedDomainMap)) {
return false;
}
}
return true;
}
}
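/*
 * Illustrative usage sketch (not part of the original library source): validating a grounding
 * against the parameter dependencies of its component. The component and parameter names are
 * hypothetical; since no Dependency is registered here, any assignment from the domains is valid.
 */
class ComponentInstanceUtilExample {
    public static void main(final String[] args) {
        Component learner = new Component("MyLearner");
        learner.addParameter(new Parameter("kernel", new CategoricalParameterDomain(new String[] { "linear", "rbf" }), "linear"));
        learner.addParameter(new Parameter("gamma", new NumericParameterDomain(false, 0.0, 1.0), 0.1));

        Map<String, String> paramValues = new HashMap<>();
        paramValues.put("kernel", "rbf");
        paramValues.put("gamma", "0.5");
        ComponentInstance ci = new ComponentInstance(learner, paramValues, new HashMap<>());

        System.out.println(ComponentInstanceUtil.isValidComponentInstantiation(ci)); // true
    }
}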
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/model/ComponentUtil.java
|
package ai.libs.hasco.model;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ai.libs.jaicore.basic.kvstore.KVStore;
import ai.libs.jaicore.basic.sets.SetUtil;
/**
* The ComponentUtil class can be used to deal with Components in a convenient way.
* For instance, for a given component (type) it can be used to return a parameterized ComponentInstance.
*
* @author wever
*/
public class ComponentUtil {
private static final Logger logger = LoggerFactory.getLogger(ComponentUtil.class);
private ComponentUtil() {
/* Intentionally left blank to prevent instantiation of this class. */
}
/**
* This procedure returns a ComponentInstance of the given Component with default parameterization.
* Note that required interfaces are not resolved.
*
* @param component The component for which a default parameterization is to be returned.
* @return An instantiation of the component with default parameterization.
*/
public static ComponentInstance defaultParameterizationOfComponent(final Component component) {
Map<String, String> parameterValues = new HashMap<>();
for (Parameter p : component.getParameters()) {
parameterValues.put(p.getName(), p.getDefaultValue() + "");
}
return componentInstanceWithNoRequiredInterfaces(component, parameterValues);
}
/**
* This procedure returns a valid random parameterization of a given component. Random decisions are made with the help of the given Random object.
* Note that required interfaces are not resolved.
*
* @param component The component for which a random parameterization is to be returned.
* @param rand The Random instance for making the random decisions.
* @return An instantiation of the component with valid random parameterization.
*/
public static ComponentInstance randomParameterizationOfComponent(final Component component, final Random rand) {
ComponentInstance ci;
do {
Map<String, String> parameterValues = new HashMap<>();
for (Parameter p : component.getParameters()) {
if (p.getDefaultDomain() instanceof CategoricalParameterDomain) {
String[] values = ((CategoricalParameterDomain) p.getDefaultDomain()).getValues();
parameterValues.put(p.getName(), values[rand.nextInt(values.length)]);
} else {
NumericParameterDomain numDomain = (NumericParameterDomain) p.getDefaultDomain();
if (numDomain.isInteger()) {
if ((int) (numDomain.getMax() - numDomain.getMin()) > 0) {
parameterValues.put(p.getName(), ((int) (rand.nextInt((int) (numDomain.getMax() - numDomain.getMin())) + numDomain.getMin())) + "");
} else {
parameterValues.put(p.getName(), (int) p.getDefaultValue() + "");
}
} else {
parameterValues.put(p.getName(), (rand.nextDouble() * (numDomain.getMax() - numDomain.getMin()) + numDomain.getMin()) + "");
}
}
}
ci = componentInstanceWithNoRequiredInterfaces(component, parameterValues);
} while (!ComponentInstanceUtil.isValidComponentInstantiation(ci));
return ci;
}
private static ComponentInstance componentInstanceWithNoRequiredInterfaces(final Component component, final Map<String, String> parameterValues) {
return new ComponentInstance(component, parameterValues, new HashMap<>());
}
/**
* Searches and returns all components within a collection of components that provide a specific interface.
*
* @param components The collection of components to search in.
* @param providedInterface The interface of interest.
* @return A sub-collection of components all of which provide the requested providedInterface.
*/
public static Collection<Component> getComponentsProvidingInterface(final Collection<Component> components, final String providedInterface) {
return components.stream().filter(x -> x.getProvidedInterfaces().contains(providedInterface)).collect(Collectors.toList());
}
/**
* Enumerates all possible component instances for a specific root component and a collection of components for resolving required interfaces.
* Hyperparameters are set to the default value.
*
* @param rootComponent The component to be considered the root.
* @param components The collection of components that is used for resolving required interfaces recursively.
* @return A collection of component instances of the given root component with all possible algorithm choices.
*/
public static Collection<ComponentInstance> getAllAlgorithmSelectionInstances(final Component rootComponent, final Collection<Component> components) {
Collection<ComponentInstance> instanceList = new LinkedList<>();
instanceList.add(ComponentUtil.defaultParameterizationOfComponent(rootComponent));
for (Entry<String, String> requiredInterface : rootComponent.getRequiredInterfaces().entrySet()) {
List<ComponentInstance> tempList = new LinkedList<>();
Collection<Component> possiblePlugins = ComponentUtil.getComponentsProvidingInterface(components, requiredInterface.getValue());
for (ComponentInstance ci : instanceList) {
for (Component possiblePlugin : possiblePlugins) {
for (ComponentInstance reqICI : getAllAlgorithmSelectionInstances(possiblePlugin, components)) {
ComponentInstance copyOfCI = new ComponentInstance(ci.getComponent(), new HashMap<>(ci.getParameterValues()), new HashMap<>(ci.getSatisfactionOfRequiredInterfaces()));
copyOfCI.getSatisfactionOfRequiredInterfaces().put(requiredInterface.getKey(), reqICI);
tempList.add(copyOfCI);
}
}
}
instanceList.clear();
instanceList.addAll(tempList);
}
return instanceList;
}
/**
* Enumerates all possible component instances for a specific root component and a collection of components for resolving required interfaces.
* Hyperparameters are set to the default value.
*
* @param requiredInterface The interface required to be provided by the root components.
* @param components The collection of components that is used for resolving required interfaces recursively.
* @return A collection of component instances of the given root component with all possible algorithm choices.
*/
public static Collection<ComponentInstance> getAllAlgorithmSelectionInstances(final String requiredInterface, final Collection<Component> components) {
Collection<ComponentInstance> instanceList = new LinkedList<>();
components.stream().filter(x -> x.getProvidedInterfaces().contains(requiredInterface)).map(x -> getAllAlgorithmSelectionInstances(x, components)).forEach(instanceList::addAll);
return instanceList;
}
public static int getNumberOfUnparametrizedCompositions(final Collection<Component> components, final String requiredInterface) {
if (hasCycles(components, requiredInterface)) {
return -1;
}
Collection<Component> candidates = components.stream().filter(c -> c.getProvidedInterfaces().contains(requiredInterface)).collect(Collectors.toList());
int numCandidates = 0;
for (Component candidate : candidates) {
int waysToResolveComponent = 0;
if (candidate.getRequiredInterfaces().isEmpty()) {
waysToResolveComponent = 1;
} else {
for (String req : candidate.getRequiredInterfaces().keySet()) {
int subSolutionsForThisInterface = getNumberOfUnparametrizedCompositions(components, candidate.getRequiredInterfaces().get(req));
if (waysToResolveComponent > 0) {
waysToResolveComponent *= subSolutionsForThisInterface;
} else {
waysToResolveComponent = subSolutionsForThisInterface;
}
}
}
numCandidates += waysToResolveComponent;
}
return numCandidates;
}
public static ComponentInstance getRandomParametrization(final ComponentInstance componentInstance, final Random rand) {
ComponentInstance randomParametrization = randomParameterizationOfComponent(componentInstance.getComponent(), rand);
componentInstance.getSatisfactionOfRequiredInterfaces().entrySet().forEach(x -> randomParametrization.getSatisfactionOfRequiredInterfaces().put(x.getKey(), getRandomParametrization(x.getValue(), rand)));
return randomParametrization;
}
public static boolean hasCycles(final Collection<Component> components, final String requiredInterface) {
return hasCycles(components, requiredInterface, new LinkedList<>());
}
private static boolean hasCycles(final Collection<Component> components, final String requiredInterface, final List<String> componentList) {
Collection<Component> candidates = components.stream().filter(c -> c.getProvidedInterfaces().contains(requiredInterface)).collect(Collectors.toList());
for (Component c : candidates) {
if (componentList.contains(c.getName())) {
return true;
}
List<String> componentListCopy = new LinkedList<>(componentList);
componentListCopy.add(c.getName());
for (String subRequiredInterface : c.getRequiredInterfaces().values()) {
if (hasCycles(components, subRequiredInterface, componentListCopy)) {
return true;
}
}
}
return false;
}
public static boolean isDefaultConfiguration(final ComponentInstance instance) {
for (Parameter p : instance.getParametersThatHaveBeenSetExplicitly()) {
if (p.isNumeric()) {
List<String> intervalAsList = SetUtil.unserializeList(instance.getParameterValue(p));
double defaultValue = Double.parseDouble(p.getDefaultValue().toString());
boolean isCompatibleWithDefaultValue = defaultValue >= Double.parseDouble(intervalAsList.get(0)) && defaultValue <= Double.parseDouble(intervalAsList.get(1));
if (!isCompatibleWithDefaultValue) {
logger.info("{} has value {}, which does not subsume the default value {}", p.getName(), instance.getParameterValue(p), defaultValue);
return false;
} else {
logger.info("{} has value {}, which IS COMPATIBLE with the default value {}", p.getName(), instance.getParameterValue(p), defaultValue);
}
} else {
if (!instance.getParameterValue(p).equals(p.getDefaultValue().toString())) {
logger.info("{} has value {}, which is not the default {}", p.getName(), instance.getParameterValue(p), p.getDefaultValue());
return false;
}
}
}
for (ComponentInstance child : instance.getSatisfactionOfRequiredInterfaces().values()) {
if (!isDefaultConfiguration(child)) {
return false;
}
}
return true;
}
public static KVStore getStatsForComponents(final Collection<Component> components) {
KVStore stats = new KVStore();
int numComponents = 0;
int numNumericParams = 0;
int numIntParams = 0;
int numDoubleParams = 0;
int numCatParams = 0;
int numBoolParams = 0;
int otherParams = 0;
for (Component c : components) {
numComponents++;
for (Parameter p : c.getParameters()) {
if (p.getDefaultDomain() instanceof CategoricalParameterDomain) {
numCatParams++;
if (p.getDefaultDomain() instanceof BooleanParameterDomain) {
numBoolParams++;
}
} else if (p.getDefaultDomain() instanceof NumericParameterDomain) {
numNumericParams++;
if (((NumericParameterDomain) p.getDefaultDomain()).isInteger()) {
numIntParams++;
} else {
numDoubleParams++;
}
} else {
otherParams++;
}
}
}
stats.put("nComponents", numComponents);
stats.put("nNumericParameters", numNumericParams);
stats.put("nIntegerParameters", numIntParams);
stats.put("nContinuousParameters", numDoubleParams);
stats.put("nCategoricalParameters", numCatParams);
stats.put("nBooleanParameters", numBoolParams);
stats.put("nOtherParameters", otherParams);
return stats;
}
}
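/*
 * Illustrative usage sketch (not part of the original library source): enumerating all algorithm
 * choices for a hypothetical "Classifier" interface and drawing a valid random parameterization.
 * All component and interface names are made-up placeholders.
 */
class ComponentUtilExample {
    public static void main(final String[] args) {
        Component knn = new Component("MyKNN");
        knn.addProvidedInterface("Classifier");
        knn.addParameter(new Parameter("k", new NumericParameterDomain(true, 1, 25), 5));

        Component bagging = new Component("MyBagging");
        bagging.addProvidedInterface("Classifier");
        bagging.addRequiredInterface("baseLearner", "BaseLearner");

        Component stump = new Component("MyStump");
        stump.addProvidedInterface("BaseLearner");

        Collection<Component> components = new LinkedList<>();
        components.add(knn);
        components.add(bagging);
        components.add(stump);

        /* two unparameterized choices: MyKNN, and MyBagging with MyStump plugged into baseLearner */
        Collection<ComponentInstance> choices = ComponentUtil.getAllAlgorithmSelectionInstances("Classifier", components);
        System.out.println(choices.size()); // 2

        /* a valid random value for the integer parameter k */
        System.out.println(ComponentUtil.randomParameterizationOfComponent(knn, new Random(42)).getParameterValue("k"));
    }
}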
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/model/Dependency.java
|
package ai.libs.hasco.model;
import java.util.Collection;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import ai.libs.jaicore.basic.sets.Pair;
public class Dependency {
private final Collection<Collection<Pair<Parameter, IParameterDomain>>> premise; // semantics are DNF (every entry is an AND-connected constraint)
private final Collection<Pair<Parameter, IParameterDomain>> conclusion;
@JsonCreator
public Dependency(@JsonProperty("premise") final Collection<Collection<Pair<Parameter, IParameterDomain>>> premise, @JsonProperty("conclusion") final Collection<Pair<Parameter, IParameterDomain>> conclusion) {
super();
this.premise = premise;
this.conclusion = conclusion;
}
public Collection<Collection<Pair<Parameter, IParameterDomain>>> getPremise() {
return this.premise;
}
public Collection<Pair<Parameter, IParameterDomain>> getConclusion() {
return this.conclusion;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((this.conclusion == null) ? 0 : this.conclusion.hashCode());
result = prime * result + ((this.premise == null) ? 0 : this.premise.hashCode());
return result;
}
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (this.getClass() != obj.getClass()) {
return false;
}
Dependency other = (Dependency) obj;
if (this.conclusion == null) {
if (other.conclusion != null) {
return false;
}
} else if (!this.conclusion.equals(other.conclusion)) {
return false;
}
if (this.premise == null) {
if (other.premise != null) {
return false;
}
} else if (!this.premise.equals(other.premise)) {
return false;
}
return true;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(this.premise);
sb.append(" => ");
sb.append(this.conclusion);
return sb.toString();
}
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/model/EvaluatedSoftwareConfigurationSolution.java
|
package ai.libs.hasco.model;
import org.api4.java.common.attributedobjects.ScoredItem;
public interface EvaluatedSoftwareConfigurationSolution<V extends Comparable<V>> extends ScoredItem<V> {
public ComponentInstance getComponentInstance();
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/model/IParameterDomain.java
|
package ai.libs.hasco.model;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonSubTypes.Type;
public interface IParameterDomain {
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type")
@JsonSubTypes({ @Type(value = NumericParameterDomain.class, name = "numeric"), @Type(value = CategoricalParameterDomain.class, name = "categorical"), @Type(value = BooleanParameterDomain.class, name = "boolean") })
public boolean contains(Object item);
public boolean subsumes(IParameterDomain otherDomain);
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/model/NumericParameterDomain.java
|
package ai.libs.hasco.model;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
public class NumericParameterDomain implements IParameterDomain {
private final boolean isInteger;
private final double min;
private final double max;
@SuppressWarnings("unused")
private NumericParameterDomain() {
// for serialization
this.isInteger = true;
this.min = 0;
this.max = 0;
}
@JsonCreator
public NumericParameterDomain(@JsonProperty("integer") final boolean isInteger, @JsonProperty("min") final double min, @JsonProperty("max") final double max) {
super();
this.isInteger = isInteger;
this.min = min;
this.max = max;
}
public boolean isInteger() {
return this.isInteger;
}
public double getMin() {
return this.min;
}
public double getMax() {
return this.max;
}
@Override
public String toString() {
return "NumericParameterDomain [isInteger=" + this.isInteger + ", min=" + this.min + ", max=" + this.max + "]";
}
@Override
public boolean contains(final Object item) {
if (!(item instanceof Number)) {
return false;
}
double n = ((Number) item).doubleValue();
return n >= this.min && n <= this.max;
}
@Override
public boolean subsumes(final IParameterDomain otherDomain) {
if (!(otherDomain instanceof NumericParameterDomain)) {
return false;
}
NumericParameterDomain otherNumericDomain = (NumericParameterDomain) otherDomain;
if (this.isInteger && !otherNumericDomain.isInteger) {
return false;
}
return this.min <= otherNumericDomain.getMin() && this.max >= otherNumericDomain.getMax();
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + (this.isInteger ? 1231 : 1237);
long temp;
temp = Double.doubleToLongBits(this.max);
result = prime * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(this.min);
result = prime * result + (int) (temp ^ (temp >>> 32));
return result;
}
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (this.getClass() != obj.getClass()) {
return false;
}
NumericParameterDomain other = (NumericParameterDomain) obj;
if (this.isInteger != other.isInteger) {
return false;
}
if (Double.doubleToLongBits(this.max) != Double.doubleToLongBits(other.max)) {
return false;
}
return Double.doubleToLongBits(this.min) == Double.doubleToLongBits(other.min);
}
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/model/Parameter.java
|
package ai.libs.hasco.model;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
@JsonPropertyOrder({"name", "defaultDomain", "defaultValue"})
public class Parameter {
private final String name;
private final IParameterDomain defaultDomain;
private final Object defaultValue;
@SuppressWarnings("unused")
private Parameter() {
// for serialization purposes
name = null;
defaultDomain = null;
defaultValue = null;
}
@JsonCreator
public Parameter(@JsonProperty("name") final String name, @JsonProperty("defaultDomain") final IParameterDomain defaultDomain,@JsonProperty("defaultValue") final Object defaultValue) {
super();
this.name = name;
this.defaultDomain = defaultDomain;
this.defaultValue = defaultValue;
}
public String getName() {
return this.name;
}
public IParameterDomain getDefaultDomain() {
return this.defaultDomain;
}
public Object getDefaultValue() {
return this.defaultValue;
}
public boolean isNumeric() {
return this.defaultDomain instanceof NumericParameterDomain;
}
public boolean isCategorical() {
return this.defaultDomain instanceof CategoricalParameterDomain;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((this.defaultDomain == null) ? 0 : this.defaultDomain.hashCode());
result = prime * result + ((this.defaultValue == null) ? 0 : this.defaultValue.hashCode());
result = prime * result + ((this.name == null) ? 0 : this.name.hashCode());
return result;
}
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (this.getClass() != obj.getClass()) {
return false;
}
Parameter other = (Parameter) obj;
if (this.defaultDomain == null) {
if (other.defaultDomain != null) {
return false;
}
} else if (!this.defaultDomain.equals(other.defaultDomain)) {
return false;
}
if (this.defaultValue == null) {
if (other.defaultValue != null) {
return false;
}
} else if (!this.defaultValue.equals(other.defaultValue)) {
return false;
}
if (this.name == null) {
if (other.name != null) {
return false;
}
} else if (!this.name.equals(other.name)) {
return false;
}
return true;
}
@Override
public String toString() {
return this.name;
}
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/model/ParameterRefinementConfiguration.java
|
package ai.libs.hasco.model;
public class ParameterRefinementConfiguration {
private final boolean initRefinementOnLogScale;
private final double focusPoint;
private final double logBasis;
private final boolean initWithExtremalPoints; // make the end-points of the interval explicit choices on the first level
private final int refinementsPerStep;
private final double intervalLength;
public ParameterRefinementConfiguration(final boolean initWithExtremalPoints, final int refinementsPerStep, final double intervalLength) {
this(Double.NaN, 0, initWithExtremalPoints, refinementsPerStep, intervalLength);
}
public ParameterRefinementConfiguration(final double focusPoint, final double logBasis, final boolean initWithExtremalPoints, final int refinementsPerStep, final double intervalLength) {
super();
this.focusPoint = focusPoint;
this.logBasis = logBasis;
this.initRefinementOnLogScale = !Double.isNaN(focusPoint);
this.initWithExtremalPoints = initWithExtremalPoints;
this.refinementsPerStep = refinementsPerStep;
this.intervalLength = intervalLength;
}
public boolean isInitRefinementOnLogScale() {
return this.initRefinementOnLogScale;
}
public double getFocusPoint() {
return this.focusPoint;
}
public double getLogBasis() {
return this.logBasis;
}
public boolean isInitWithExtremalPoints() {
return this.initWithExtremalPoints;
}
public int getRefinementsPerStep() {
return this.refinementsPerStep;
}
public double getIntervalLength() {
return this.intervalLength;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("[InitiallyLogScale:");
sb.append(this.initRefinementOnLogScale);
sb.append(",RefinementsPerStep:");
sb.append(this.refinementsPerStep);
sb.append(",intervalLength:");
sb.append(this.intervalLength);
sb.append("]");
return sb.toString();
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
long temp;
temp = Double.doubleToLongBits(this.focusPoint);
result = prime * result + (int) (temp ^ (temp >>> 32));
result = prime * result + (this.initRefinementOnLogScale ? 1231 : 1237);
result = prime * result + (this.initWithExtremalPoints ? 1231 : 1237);
temp = Double.doubleToLongBits(this.intervalLength);
result = prime * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(this.logBasis);
result = prime * result + (int) (temp ^ (temp >>> 32));
result = prime * result + this.refinementsPerStep;
return result;
}
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (this.getClass() != obj.getClass()) {
return false;
}
ParameterRefinementConfiguration other = (ParameterRefinementConfiguration) obj;
if (Double.doubleToLongBits(this.focusPoint) != Double.doubleToLongBits(other.focusPoint)) {
return false;
}
if (this.initRefinementOnLogScale != other.initRefinementOnLogScale) {
return false;
}
if (this.initWithExtremalPoints != other.initWithExtremalPoints) {
return false;
}
if (Double.doubleToLongBits(this.intervalLength) != Double.doubleToLongBits(other.intervalLength)) {
return false;
}
if (Double.doubleToLongBits(this.logBasis) != Double.doubleToLongBits(other.logBasis)) {
return false;
}
return this.refinementsPerStep == other.refinementsPerStep;
}
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/model/UnparametrizedComponentInstance.java
|
package ai.libs.hasco.model;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import ai.libs.jaicore.logging.ToJSONStringUtil;
public class UnparametrizedComponentInstance {
private final String componentName;
private final Map<String, UnparametrizedComponentInstance> satisfactionOfRequiredInterfaces;
public UnparametrizedComponentInstance(final String componentName, final Map<String, UnparametrizedComponentInstance> satisfactionOfRequiredInterfaces) {
super();
this.componentName = componentName;
this.satisfactionOfRequiredInterfaces = satisfactionOfRequiredInterfaces;
}
public UnparametrizedComponentInstance(final ComponentInstance composition) {
Map<String, ComponentInstance> resolvedRequiredInterfaces = composition.getSatisfactionOfRequiredInterfaces();
this.satisfactionOfRequiredInterfaces = new HashMap<>();
resolvedRequiredInterfaces.keySet().forEach(r -> this.satisfactionOfRequiredInterfaces.put(r, new UnparametrizedComponentInstance(resolvedRequiredInterfaces.get(r))));
this.componentName = composition.getComponent().getName();
}
public String getComponentName() {
return this.componentName;
}
public Map<String, UnparametrizedComponentInstance> getSatisfactionOfRequiredInterfaces() {
return this.satisfactionOfRequiredInterfaces;
}
/**
* Determines the sub-composition under a path of required interfaces
**/
public UnparametrizedComponentInstance getSubComposition(final List<String> path) {
UnparametrizedComponentInstance current = this;
for (String requiredInterface : path) {
if (!current.getSatisfactionOfRequiredInterfaces().containsKey(requiredInterface)) {
throw new IllegalArgumentException("Invalid path " + path + " (size " + path.size() + "). The component " + current.getComponentName() + " does not have a required interface with id \"" + requiredInterface + "\"");
}
current = current.getSatisfactionOfRequiredInterfaces().get(requiredInterface);
}
return current;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((this.componentName == null) ? 0 : this.componentName.hashCode());
result = prime * result + ((this.satisfactionOfRequiredInterfaces == null) ? 0 : this.satisfactionOfRequiredInterfaces.hashCode());
return result;
}
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (this.getClass() != obj.getClass()) {
return false;
}
UnparametrizedComponentInstance other = (UnparametrizedComponentInstance) obj;
if (this.componentName == null) {
if (other.componentName != null) {
return false;
}
} else if (!this.componentName.equals(other.componentName)) {
return false;
}
if (this.satisfactionOfRequiredInterfaces == null) {
if (other.satisfactionOfRequiredInterfaces != null) {
return false;
}
} else if (!this.satisfactionOfRequiredInterfaces.equals(other.satisfactionOfRequiredInterfaces)) {
return false;
}
return true;
}
@Override
public String toString() {
Map<String, Object> fields = new HashMap<>();
fields.put("componentName", this.componentName);
fields.put("satisfactionOfRequiredInterfaces", this.satisfactionOfRequiredInterfaces);
return ToJSONStringUtil.toJSONString(this.getClass().getSimpleName(), fields);
}
}
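/*
 * Illustrative usage sketch (not part of the original library source): navigating a composition by
 * a path of required-interface ids via getSubComposition. All names are hypothetical placeholders.
 */
class UnparametrizedComponentInstanceExample {
    public static void main(final String[] args) {
        UnparametrizedComponentInstance base = new UnparametrizedComponentInstance("MyBaseLearner", new HashMap<>());
        Map<String, UnparametrizedComponentInstance> satisfaction = new HashMap<>();
        satisfaction.put("baseLearner", base);
        UnparametrizedComponentInstance ensemble = new UnparametrizedComponentInstance("MyEnsemble", satisfaction);

        /* follow the path ["baseLearner"] from the root composition */
        System.out.println(ensemble.getSubComposition(java.util.Arrays.asList("baseLearner")).getComponentName()); // MyBaseLearner
    }
}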
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/observers/HASCOModelStatisticsObserver.java
|
package ai.libs.hasco.observers;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
import com.google.common.eventbus.Subscribe;
import ai.libs.hasco.events.HASCOSolutionEvent;
import ai.libs.hasco.model.UnparametrizedComponentInstance;
public class HASCOModelStatisticsObserver {
private final Map<UnparametrizedComponentInstance, List<HASCOSolutionEvent<Double>>> observedSolutionsGroupedByModuloParameters = new HashMap<>();
@Subscribe
public void receiveSolutionEvent(final HASCOSolutionEvent<Double> event) {
UnparametrizedComponentInstance comp = new UnparametrizedComponentInstance(event.getSolutionCandidate().getComponentInstance());
if (!this.observedSolutionsGroupedByModuloParameters.containsKey(comp)) {
this.observedSolutionsGroupedByModuloParameters.put(comp, new ArrayList<>());
}
this.observedSolutionsGroupedByModuloParameters.get(comp).add(event);
}
public Map<UnparametrizedComponentInstance, List<HASCOSolutionEvent<Double>>> getObservedSolutionsGroupedByModuloParameters() {
return this.observedSolutionsGroupedByModuloParameters;
}
public DescriptiveStatistics getPerformanceStatisticsForComposition(final UnparametrizedComponentInstance composition) {
DescriptiveStatistics stats = new DescriptiveStatistics();
this.observedSolutionsGroupedByModuloParameters.get(composition).forEach(e -> stats.addValue(e.getSolutionCandidate().getScore()));
return stats;
}
public Map<UnparametrizedComponentInstance, DescriptiveStatistics> getPerformanceStatisticsPerComposition() {
Map<UnparametrizedComponentInstance, DescriptiveStatistics> statsMap = new HashMap<>();
for (UnparametrizedComponentInstance composition : this.observedSolutionsGroupedByModuloParameters.keySet()) {
statsMap.put(composition, this.getPerformanceStatisticsForComposition(composition));
}
return statsMap;
}
public DescriptiveStatistics getEvaluationTimeStatisticsForComposition(final UnparametrizedComponentInstance composition) {
DescriptiveStatistics stats = new DescriptiveStatistics();
this.observedSolutionsGroupedByModuloParameters.get(composition).forEach(e -> stats.addValue(e.getSolutionCandidate().getTimeToEvaluateCandidate()));
return stats;
}
public Map<UnparametrizedComponentInstance, DescriptiveStatistics> getEvaluationTimeStatisticsPerComposition() {
Map<UnparametrizedComponentInstance, DescriptiveStatistics> statsMap = new HashMap<>();
this.observedSolutionsGroupedByModuloParameters.keySet().forEach(c -> statsMap.put(c, this.getEvaluationTimeStatisticsForComposition(c)));
return statsMap;
}
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/optimizingfactory/BaseFactory.java
|
package ai.libs.hasco.optimizingfactory;
import ai.libs.hasco.exceptions.ComponentInstantiationFailedException;
import ai.libs.hasco.model.ComponentInstance;
public interface BaseFactory<T> {
public T getComponentInstantiation(ComponentInstance groundComponent) throws ComponentInstantiationFailedException;
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/optimizingfactory/OptimizingFactory.java
|
package ai.libs.hasco.optimizingfactory;
import java.util.HashMap;
import java.util.Map;
import org.api4.java.algorithm.Timeout;
import org.api4.java.algorithm.events.IAlgorithmEvent;
import org.api4.java.algorithm.exceptions.AlgorithmException;
import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException;
import org.api4.java.algorithm.exceptions.AlgorithmTimeoutedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.eventbus.Subscribe;
import ai.libs.hasco.core.SoftwareConfigurationProblem;
import ai.libs.hasco.exceptions.ComponentInstantiationFailedException;
import ai.libs.hasco.model.ComponentInstance;
import ai.libs.hasco.model.EvaluatedSoftwareConfigurationSolution;
import ai.libs.jaicore.basic.algorithm.AAlgorithm;
import ai.libs.jaicore.basic.algorithm.AlgorithmFinishedEvent;
import ai.libs.jaicore.basic.algorithm.AlgorithmInitializedEvent;
import ai.libs.jaicore.logging.ToJSONStringUtil;
public class OptimizingFactory<P extends SoftwareConfigurationProblem<V>, T, C extends EvaluatedSoftwareConfigurationSolution<V>, V extends Comparable<V>> extends AAlgorithm<OptimizingFactoryProblem<P, T, V>, T> {
/* logging */
private Logger logger = LoggerFactory.getLogger(OptimizingFactory.class);
private String loggerName;
private final SoftwareConfigurationAlgorithmFactory<P, C, V, ?> factoryForOptimizationAlgorithm;
private T constructedObject;
private V performanceOfObject;
private ComponentInstance componentInstanceOfObject;
private final SoftwareConfigurationAlgorithm<P, C, V> optimizer;
public OptimizingFactory(final OptimizingFactoryProblem<P, T, V> problem, final SoftwareConfigurationAlgorithmFactory<P, C, V, ?> factoryForOptimizationAlgorithm) {
super(problem);
this.factoryForOptimizationAlgorithm = factoryForOptimizationAlgorithm;
this.optimizer = this.factoryForOptimizationAlgorithm.getAlgorithm(this.getInput().getConfigurationProblem());
this.optimizer.registerListener(new Object() {
@Subscribe
public void receiveAlgorithmEvent(final IAlgorithmEvent event) {
if (!(event instanceof AlgorithmInitializedEvent || event instanceof AlgorithmFinishedEvent)) {
OptimizingFactory.this.post(event);
}
}
});
}
@Override
public IAlgorithmEvent nextWithException() throws AlgorithmException, InterruptedException, AlgorithmExecutionCanceledException, AlgorithmTimeoutedException {
switch (this.getState()) {
case CREATED:
/* initialize optimizer */
if (this.loggerName != null) {
this.logger.info("Setting logger of optimizer {} to {}.optAlgo", this.optimizer.getClass().getName(), this.loggerName);
this.optimizer.setLoggerName(this.loggerName + ".optAlgo");
}
IAlgorithmEvent initEvent = this.optimizer.next();
assert initEvent instanceof AlgorithmInitializedEvent : "The first event emitted by the optimizer has not been its AlgorithmInitializationEvent";
return this.activate();
case ACTIVE:
C solutionModel = this.optimizer.call();
try {
this.constructedObject = this.getInput().getBaseFactory().getComponentInstantiation(solutionModel.getComponentInstance());
this.performanceOfObject = solutionModel.getScore();
this.componentInstanceOfObject = solutionModel.getComponentInstance();
return this.terminate();
} catch (ComponentInstantiationFailedException e) {
throw new AlgorithmException("Could not conduct next step in OptimizingFactory due to an exception in the component instantiation.", e);
}
default:
throw new IllegalStateException("Cannot do anything in state " + this.getState());
}
}
@Override
public T call() throws AlgorithmException, InterruptedException, AlgorithmExecutionCanceledException, AlgorithmTimeoutedException {
while (this.hasNext()) {
this.nextWithException();
}
return this.constructedObject;
}
/**
* @return the optimizer that is used for building the object
*/
public SoftwareConfigurationAlgorithm<P, C, V> getOptimizer() {
return this.optimizer;
}
public AlgorithmInitializedEvent init() {
IAlgorithmEvent e = null;
while (this.hasNext()) {
e = this.next();
if (e instanceof AlgorithmInitializedEvent) {
return (AlgorithmInitializedEvent) e;
}
}
throw new IllegalStateException("Could not complete initialization");
}
public V getPerformanceOfObject() {
return this.performanceOfObject;
}
public ComponentInstance getComponentInstanceOfObject() {
return this.componentInstanceOfObject;
}
@Override
public String getLoggerName() {
return this.loggerName;
}
@Override
public void setLoggerName(final String name) {
this.logger.info("Switching logger from {} to {}", this.logger.getName(), name);
this.loggerName = name;
this.logger = LoggerFactory.getLogger(name);
this.logger.info("Activated logger {} with name {}", name, this.logger.getName());
super.setLoggerName(this.loggerName + "._algorithm");
}
@Override
public String toString() {
Map<String, Object> fields = new HashMap<>();
fields.put("factoryForOptimizationAlgorithm", this.factoryForOptimizationAlgorithm);
fields.put("constructedObject", this.constructedObject);
fields.put("performanceOfObject", this.performanceOfObject);
fields.put("optimizer", this.optimizer);
return ToJSONStringUtil.toJSONString(fields);
}
@Override
public void cancel() {
this.logger.info("Received cancel. First canceling the optimizer {}, then my own routine!", this.optimizer.getId());
this.optimizer.cancel();
this.logger.debug("Now canceling the OptimizingFactory itself.");
super.cancel();
assert this.isCanceled() : "Cancel-flag must be true at end of cancel routine!";
}
@Override
public void setTimeout(final Timeout to) {
super.setTimeout(to);
this.logger.info("Forwarding timeout {} to optimizer.", to);
this.optimizer.setTimeout(to);
}
}
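/*
* Illustrative usage sketch (editor's addition, not part of the original source): how the factory above is
* typically driven. The types and variables MyConfigProblem, MyObject, MySolutionCandidate, "baseFactory",
* "configProblem" and "optAlgoFactory" are hypothetical placeholders for application-specific objects.
*
* OptimizingFactoryProblem<MyConfigProblem, MyObject, Double> problem = new OptimizingFactoryProblem<>(baseFactory, configProblem);
* OptimizingFactory<MyConfigProblem, MyObject, MySolutionCandidate, Double> factory = new OptimizingFactory<>(problem, optAlgoFactory);
* MyObject builtObject = factory.call(); // runs the optimizer and instantiates the best composition found
* Double score = factory.getPerformanceOfObject(); // score associated with the returned composition
* ComponentInstance composition = factory.getComponentInstanceOfObject();
*/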
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/optimizingfactory/OptimizingFactoryProblem.java
|
package ai.libs.hasco.optimizingfactory;
import ai.libs.hasco.core.SoftwareConfigurationProblem;
public class OptimizingFactoryProblem<P extends SoftwareConfigurationProblem<V>, T, V extends Comparable<V>> {
private final BaseFactory<T> baseFactory;
private final P configurationProblem;
public OptimizingFactoryProblem(BaseFactory<T> baseFactory, P configurationProblem) {
super();
this.baseFactory = baseFactory;
this.configurationProblem = configurationProblem;
}
public BaseFactory<T> getBaseFactory() {
return baseFactory;
}
public P getConfigurationProblem() {
return configurationProblem;
}
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/optimizingfactory/SoftwareConfigurationAlgorithm.java
|
package ai.libs.hasco.optimizingfactory;
import ai.libs.hasco.core.SoftwareConfigurationProblem;
import ai.libs.hasco.model.EvaluatedSoftwareConfigurationSolution;
import ai.libs.jaicore.basic.IOwnerBasedAlgorithmConfig;
import ai.libs.jaicore.basic.algorithm.AOptimizer;
public abstract class SoftwareConfigurationAlgorithm<P extends SoftwareConfigurationProblem<V>, O extends EvaluatedSoftwareConfigurationSolution<V>, V extends Comparable<V>> extends AOptimizer<P, O, V> {
protected SoftwareConfigurationAlgorithm(final P input) {
super(input);
}
protected SoftwareConfigurationAlgorithm(final IOwnerBasedAlgorithmConfig config, final P input) {
super(config, input);
}
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/optimizingfactory/SoftwareConfigurationAlgorithmFactory.java
|
package ai.libs.hasco.optimizingfactory;
import org.api4.java.algorithm.IAlgorithmFactory;
import ai.libs.hasco.core.SoftwareConfigurationProblem;
import ai.libs.hasco.model.EvaluatedSoftwareConfigurationSolution;
public interface SoftwareConfigurationAlgorithmFactory<P extends SoftwareConfigurationProblem<V>, O extends EvaluatedSoftwareConfigurationSolution<V>, V extends Comparable<V>, A extends SoftwareConfigurationAlgorithm<P, O, V>> extends IAlgorithmFactory<P, O, A> {
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/reduction/HASCOReduction.java
|
package ai.libs.hasco.reduction;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.function.Supplier;
import org.api4.java.datastructure.graph.implicit.IGraphGenerator;
import ai.libs.hasco.core.HASCOSolutionCandidate;
import ai.libs.hasco.core.IHASCOPlanningReduction;
import ai.libs.hasco.core.IsRefinementCompletedPredicate;
import ai.libs.hasco.core.IsValidParameterRangeRefinementPredicate;
import ai.libs.hasco.core.RefinementConfiguredSoftwareConfigurationProblem;
import ai.libs.hasco.core.Util;
import ai.libs.hasco.core.IsNotRefinable;
import ai.libs.hasco.model.Component;
import ai.libs.hasco.model.ComponentInstance;
import ai.libs.hasco.model.NumericParameterDomain;
import ai.libs.hasco.model.Parameter;
import ai.libs.hasco.model.ParameterRefinementConfiguration;
import ai.libs.jaicore.basic.algorithm.reduction.AlgorithmicProblemReduction;
import ai.libs.jaicore.logic.fol.structure.CNFFormula;
import ai.libs.jaicore.logic.fol.structure.ConstantParam;
import ai.libs.jaicore.logic.fol.structure.Literal;
import ai.libs.jaicore.logic.fol.structure.LiteralParam;
import ai.libs.jaicore.logic.fol.structure.Monom;
import ai.libs.jaicore.logic.fol.structure.VariableParam;
import ai.libs.jaicore.logic.fol.theories.EvaluablePredicate;
import ai.libs.jaicore.planning.classical.problems.ceoc.CEOCOperation;
import ai.libs.jaicore.planning.core.interfaces.IEvaluatedPlan;
import ai.libs.jaicore.planning.core.interfaces.IPlan;
import ai.libs.jaicore.planning.hierarchical.problems.ceocipstn.CEOCIPSTNPlanningDomain;
import ai.libs.jaicore.planning.hierarchical.problems.ceocipstn.CEOCIPSTNPlanningProblem;
import ai.libs.jaicore.planning.hierarchical.problems.ceocipstn.OCIPMethod;
import ai.libs.jaicore.planning.hierarchical.problems.htn.CostSensitiveHTNPlanningProblem;
import ai.libs.jaicore.planning.hierarchical.problems.stn.TaskNetwork;
/**
* This is the class that conducts the actual problem reduction from software configuration to HTN planning.
*
* @author fmohr
*
*/
public class HASCOReduction<V extends Comparable<V>>
implements AlgorithmicProblemReduction<RefinementConfiguredSoftwareConfigurationProblem<V>, ComponentInstance, CostSensitiveHTNPlanningProblem<CEOCIPSTNPlanningProblem, V>, IEvaluatedPlan<V>> {
private static final boolean CONFIGURE_PARAMS = true; // this could be determined automatically later
// component selection
private static final String RESOLVE_COMPONENT_IFACE_PREFIX = "1_tResolve";
private static final String SATISFY_PREFIX = "1_satisfy";
// component configuration
private static final String REFINE_PARAMETERS_PREFIX = "2_tRefineParamsOf";
private static final String REFINE_PARAMETER_PREFIX = "2_tRefineParam";
private static final String DECLARE_CLOSED_PREFIX = "2_declareClosed";
private static final String REDEF_VALUE_PREFIX = "2_redefValue";
private static final String COMPONENT_OF_C1 = "component(c1)";
private RefinementConfiguredSoftwareConfigurationProblem<V> originalProblem;
/* working variables */
private Collection<Component> components;
private Map<Component, Map<Parameter, ParameterRefinementConfiguration>> paramRefinementConfig;
private Supplier<HASCOSolutionCandidate<V>> bestSolutionSupplier;
public HASCOReduction(final Supplier<HASCOSolutionCandidate<V>> bestSolutionSupplier) {
this.bestSolutionSupplier = bestSolutionSupplier;
}
public Monom getInitState() {
if (this.originalProblem == null) {
throw new IllegalStateException("Cannot compute init state before transformation has been invoked.");
}
Monom init = new Monom();
this.getExistingInterfaces().forEach(s -> init.add(new Literal("iface('" + s + "')")));
init.add(new Literal("component('request')"));
return init;
}
public Collection<String> getExistingInterfaces() {
if (this.originalProblem == null) {
throw new IllegalStateException("Cannot compute existing interfaces before transformation has been invoked.");
}
Collection<String> ifaces = new HashSet<>();
for (Component c : this.components) {
ifaces.addAll(c.getProvidedInterfaces());
ifaces.addAll(c.getRequiredInterfaces().values());
}
return ifaces;
}
public CEOCIPSTNPlanningDomain getPlanningDomain() {
/* create operations */
Collection<CEOCOperation> operations = new ArrayList<>();
for (Component c : this.components) {
for (String i : c.getProvidedInterfaces()) {
List<VariableParam> params = new ArrayList<>();
params.add(new VariableParam("c1"));
params.add(new VariableParam("c2"));
int j = 0;
Map<CNFFormula, Monom> addList = new HashMap<>();
Monom standardKnowledgeAboutNewComponent = new Monom("component(c2) & resolves(c1, '" + i + "', '" + c.getName() + "'," + " c2" + ")");
for (Parameter p : c.getParameters()) {
String paramIdentifier = "p" + (++j);
params.add(new VariableParam(paramIdentifier));
/* add the information about this parameter container */
List<LiteralParam> literalParams = new ArrayList<>();
literalParams.clear();
literalParams.add(new ConstantParam(c.getName()));
literalParams.add(new ConstantParam(p.getName()));
literalParams.add(new VariableParam("c2"));
literalParams.add(new VariableParam(paramIdentifier));
standardKnowledgeAboutNewComponent.add(new Literal("parameterContainer", literalParams));
/* add knowledge about initial value */
List<LiteralParam> valParams = new ArrayList<>();
valParams.add(new VariableParam(paramIdentifier));
if (p.isNumeric()) {
standardKnowledgeAboutNewComponent.add(new Literal("parameterFocus(c2, '" + p.getName() + "', '" + this.paramRefinementConfig.get(c).get(p).getFocusPoint() + "')"));
NumericParameterDomain np = (NumericParameterDomain) p.getDefaultDomain();
valParams.add(new ConstantParam("[" + np.getMin() + "," + np.getMax() + "]"));
} else {
valParams.add(new ConstantParam(p.getDefaultValue().toString()));
}
standardKnowledgeAboutNewComponent.add(new Literal("val", valParams));
}
int k = 0;
for (String requiredInterfaceID : c.getRequiredInterfaces().keySet()) {
String reqIntIdentifier = "sc" + (++k);
params.add(new VariableParam(reqIntIdentifier));
List<LiteralParam> literalParams = new ArrayList<>();
literalParams.clear();
literalParams.add(new ConstantParam(c.getName()));
literalParams.add(new ConstantParam(requiredInterfaceID));
literalParams.add(new VariableParam("c2"));
literalParams.add(new VariableParam(reqIntIdentifier));
standardKnowledgeAboutNewComponent.add(new Literal("interfaceIdentifier", literalParams));
}
addList.put(new CNFFormula(), standardKnowledgeAboutNewComponent);
CEOCOperation newOp = new CEOCOperation(SATISFY_PREFIX + i + "With" + c.getName(), params, new Monom(COMPONENT_OF_C1), addList, new HashMap<>(), new ArrayList<>());
operations.add(newOp);
}
}
/* create operations for parameter initialization */
Map<CNFFormula, Monom> addList = new HashMap<>();
addList.put(new CNFFormula(), new Monom("val(container,newValue) & overwritten(container)"));
Map<CNFFormula, Monom> deleteList = new HashMap<>();
deleteList.put(new CNFFormula(), new Monom("val(container,previousValue)"));
operations.add(new CEOCOperation(REDEF_VALUE_PREFIX, "container,previousValue,newValue", new Monom("val(container,previousValue)"), addList, deleteList, ""));
addList = new HashMap<>();
addList.put(new CNFFormula(), new Monom("closed(container)"));
deleteList = new HashMap<>();
operations.add(new CEOCOperation(DECLARE_CLOSED_PREFIX, "container", new Monom(), addList, deleteList, ""));
/* create methods */
Collection<OCIPMethod> methods = new ArrayList<>();
for (Component c : this.components) {
/*
* create methods for the refinement of the interfaces offered by this component
*/
for (String i : c.getProvidedInterfaces()) {
List<VariableParam> params = new ArrayList<>();
VariableParam inputParam = new VariableParam("c1");
params.add(inputParam);
params.add(new VariableParam("c2"));
Map<String, String> requiredInterfaces = c.getRequiredInterfaces();
List<Literal> network = new ArrayList<>();
StringBuilder refinementArgumentsSB = new StringBuilder();
int j = 0;
if (CONFIGURE_PARAMS) {
for (j = 1; j <= c.getParameters().size(); j++) {
String paramIdentifier = "p" + j;
refinementArgumentsSB.append(", " + paramIdentifier);
}
}
for (int k = 1; k <= requiredInterfaces.entrySet().size(); k++) {
refinementArgumentsSB.append(",sc" + k);
}
int sc = 0;
network.add(new Literal(SATISFY_PREFIX + i + "With" + c.getName() + "(" + "c1" + "," + "c2" + refinementArgumentsSB.toString() + ")"));
for (Entry<String, String> requiredInterface : requiredInterfaces.entrySet()) {
String paramName = "sc" + (++sc);
params.add(new VariableParam(paramName));
network.add(new Literal(RESOLVE_COMPONENT_IFACE_PREFIX + requiredInterface.getValue() + "(c2," + paramName + ")"));
}
refinementArgumentsSB = new StringBuilder();
if (CONFIGURE_PARAMS) {
for (j = 1; j <= c.getParameters().size(); j++) {
String paramIdentifier = "p" + j;
params.add(new VariableParam(paramIdentifier));
refinementArgumentsSB.append(", " + paramIdentifier);
}
}
network.add(new Literal(REFINE_PARAMETERS_PREFIX + c.getName() + "(" + "c1" + "," + "c2" + refinementArgumentsSB.toString() + ")"));
List<VariableParam> outputs = new ArrayList<>(params);
outputs.remove(inputParam);
methods.add(new OCIPMethod("resolve" + i + "With" + c.getName(), params, new Literal(RESOLVE_COMPONENT_IFACE_PREFIX + i + "(c1,c2)"), new Monom(COMPONENT_OF_C1), new TaskNetwork(network), false, outputs, new Monom()));
}
/* create methods for choosing/refining parameters */
List<VariableParam> params = new ArrayList<>();
params.add(new VariableParam("c1"));
List<Literal> initNetwork = new ArrayList<>();
StringBuilder refinementArgumentsSB = new StringBuilder();
int j = 0;
/*
* go, in an ordering that is consistent with the pre-order on the params
* imposed by the dependencies, over the set of params
*/
if (CONFIGURE_PARAMS) {
for (Parameter p : c.getParameters()) {
String paramName = "p" + (++j);
refinementArgumentsSB.append(", " + paramName);
params.add(new VariableParam(paramName));
initNetwork.add(new Literal(REFINE_PARAMETER_PREFIX + p.getName() + "Of" + c.getName() + "(c2, " + paramName + ")"));
methods.add(new OCIPMethod("ignoreParamRefinementFor" + p.getName() + "Of" + c.getName(), "object, container, curval", new Literal(REFINE_PARAMETER_PREFIX + p.getName() + "Of" + c.getName() + "(object,container)"),
new Monom("parameterContainer('" + c.getName() + "', '" + p.getName() + "', object, container) & val(container,curval) & overwritten(container)"), new TaskNetwork(DECLARE_CLOSED_PREFIX + "(container)"), false,
"", new Monom("notRefinable('" + c.getName() + "', object, '" + p.getName() + "', container, curval)")));
methods.add(new OCIPMethod("refineParam" + p.getName() + "Of" + c.getName(), "object, container, curval, newval", new Literal(REFINE_PARAMETER_PREFIX + p.getName() + "Of" + c.getName() + "(object,container)"),
new Monom("parameterContainer('" + c.getName() + "', '" + p.getName() + "', object, container) & val(container,curval)"), new TaskNetwork(REDEF_VALUE_PREFIX + "(container,curval,newval)"), false, "",
new Monom("isValidParameterRangeRefinement('" + c.getName() + "', object, '" + p.getName() + "', container, curval, newval)")));
}
initNetwork.add(new Literal(REFINE_PARAMETERS_PREFIX + c.getName() + "(" + "c1" + "," + "c2" + refinementArgumentsSB.toString() + ")"));
params = new ArrayList<>(params);
params.add(1, new VariableParam("c2"));
methods.add(new OCIPMethod("refineParamsOf" + c.getName(), params, new Literal(REFINE_PARAMETERS_PREFIX + c.getName() + "(c1,c2" + refinementArgumentsSB.toString() + ")"), new Monom(COMPONENT_OF_C1),
new TaskNetwork(initNetwork), false, new ArrayList<>(), new Monom("!refinementCompleted('" + c.getName() + "', c2)")));
methods.add(new OCIPMethod("closeRefinementOfParamsOf" + c.getName(), params, new Literal(REFINE_PARAMETERS_PREFIX + c.getName() + "(c1,c2" + refinementArgumentsSB.toString() + ")"), new Monom(COMPONENT_OF_C1),
new TaskNetwork(), false, new ArrayList<>(), new Monom("refinementCompleted('" + c.getName() + "', c2)")));
}
}
return new CEOCIPSTNPlanningDomain(operations, methods);
}
public CEOCIPSTNPlanningProblem getPlanningProblem(final CEOCIPSTNPlanningDomain domain, final CNFFormula knowledge, final Monom init) {
Map<String, EvaluablePredicate> evaluablePredicates = new HashMap<>();
evaluablePredicates.put("isValidParameterRangeRefinement", new IsValidParameterRangeRefinementPredicate(this.components, this.paramRefinementConfig));
evaluablePredicates.put("notRefinable", new IsNotRefinable(this.components, this.paramRefinementConfig));
evaluablePredicates.put("refinementCompleted", new IsRefinementCompletedPredicate(this.components, this.paramRefinementConfig));
return new CEOCIPSTNPlanningProblem(domain, knowledge, init, new TaskNetwork(RESOLVE_COMPONENT_IFACE_PREFIX + this.originalProblem.getRequiredInterface() + "('request', 'solution')"), evaluablePredicates, new HashMap<>());
}
public CEOCIPSTNPlanningProblem getPlanningProblem() {
return this.getPlanningProblem(this.getPlanningDomain(), new CNFFormula(), this.getInitState());
}
/**
* This method is a utility for clients who want to work on the graph obtained from HASCO's reduction without using HASCO's search logic. An illustrative usage sketch follows below this method.
*
* @param transformer the planning reduction used to encode the HTN planning problem into a graph search problem
* @return the graph generator of the search problem induced by the given planning reduction
*/
public <T, A> IGraphGenerator<T, A> getGraphGeneratorUsedByHASCOForSpecificPlanner(final IHASCOPlanningReduction<T, A> transformer) {
return transformer.encodeProblem(this.getPlanningProblem()).getGraphGenerator();
}
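/*
* Illustrative usage sketch (editor's addition, not part of the original source): obtaining the HTN planning
* problem and the induced search graph for a configuration problem. The variables "rcProblem" (a
* RefinementConfiguredSoftwareConfigurationProblem<Double>) and "planningReduction" (an IHASCOPlanningReduction
* implementation for TFD-style planning, hence the TFDNode/String type arguments) are assumptions of this sketch
* and must be provided by the caller.
*
* HASCOReduction<Double> reduction = new HASCOReduction<>(() -> null); // no best-solution supplier needed here
* CostSensitiveHTNPlanningProblem<CEOCIPSTNPlanningProblem, Double> htnProblem = reduction.encodeProblem(rcProblem);
* IGraphGenerator<TFDNode, String> graphGen = reduction.getGraphGeneratorUsedByHASCOForSpecificPlanner(planningReduction);
*/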
@Override
public CostSensitiveHTNPlanningProblem<CEOCIPSTNPlanningProblem, V> encodeProblem(final RefinementConfiguredSoftwareConfigurationProblem<V> problem) {
if (problem.getRequiredInterface() == null) {
throw new IllegalArgumentException("No required interface defined in the problem!");
}
/* set object variables that will be important for several methods in the reduction */
this.originalProblem = problem;
this.components = this.originalProblem.getComponents();
this.paramRefinementConfig = this.originalProblem.getParamRefinementConfig();
/* build the cost insensitive planning problem */
CEOCIPSTNPlanningProblem planningProblem = this.getPlanningProblem();
/* derive a plan evaluator from the configuration evaluator */
return new CostSensitiveHTNPlanningProblem<>(planningProblem, new HASCOReductionSolutionEvaluator<>(problem, this));
}
@Override
public ComponentInstance decodeSolution(final IEvaluatedPlan<V> solution) {
return this.decodeSolution((IPlan) solution);
}
public ComponentInstance decodeSolution(final IPlan plan) {
return Util.getSolutionCompositionForPlan(this.components, this.getInitState(), plan, true);
}
public Supplier<HASCOSolutionCandidate<V>> getBestSolutionSupplier() {
return this.bestSolutionSupplier;
}
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/reduction/HASCOReductionSolutionEvaluator.java
|
package ai.libs.hasco.reduction;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.Collectors;
import org.api4.java.common.attributedobjects.IInformedObjectEvaluatorExtension;
import org.api4.java.common.attributedobjects.IObjectEvaluator;
import org.api4.java.common.attributedobjects.ObjectEvaluationFailedException;
import org.api4.java.common.control.ILoggingCustomizable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ai.libs.hasco.core.RefinementConfiguredSoftwareConfigurationProblem;
import ai.libs.hasco.model.ComponentInstance;
import ai.libs.jaicore.logging.ToJSONStringUtil;
import ai.libs.jaicore.planning.core.Action;
import ai.libs.jaicore.planning.core.interfaces.IPlan;
public class HASCOReductionSolutionEvaluator<V extends Comparable<V>> implements IObjectEvaluator<IPlan, V>, ILoggingCustomizable {
private Logger logger = LoggerFactory.getLogger(HASCOReductionSolutionEvaluator.class);
private final RefinementConfiguredSoftwareConfigurationProblem<V> configurationProblem;
private final HASCOReduction<V> reduction;
private final IObjectEvaluator<ComponentInstance, V> evaluator;
public HASCOReductionSolutionEvaluator(final RefinementConfiguredSoftwareConfigurationProblem<V> configurationProblem, final HASCOReduction<V> reduction) {
super();
this.configurationProblem = configurationProblem;
this.reduction = reduction;
this.evaluator = this.configurationProblem.getCompositionEvaluator();
}
@SuppressWarnings("unchecked")
@Override
public V evaluate(final IPlan plan) throws InterruptedException, ObjectEvaluationFailedException {
ComponentInstance solution = this.reduction.decodeSolution(plan);
if (solution == null) {
throw new IllegalArgumentException("The following plan yields a null solution: \n\t" + plan.getActions().stream().map(Action::getEncoding).collect(Collectors.joining("\n\t")));
}
if (this.evaluator instanceof IInformedObjectEvaluatorExtension && this.reduction.getBestSolutionSupplier().get() != null) {
((IInformedObjectEvaluatorExtension<V>) this.evaluator).informAboutBestScore(this.reduction.getBestSolutionSupplier().get().getScore());
}
this.logger.info("Forwarding evaluation request to evaluator {}", this.evaluator.getClass().getName());
return this.evaluator.evaluate(solution);
}
@Override
public String toString() {
Map<String, Object> fields = new HashMap<>();
fields.put("problem", this.configurationProblem);
return ToJSONStringUtil.toJSONString(this.getClass().getSimpleName(), fields);
}
@Override
public String getLoggerName() {
return this.logger.getName();
}
@Override
public void setLoggerName(final String name) {
this.logger = LoggerFactory.getLogger(name);
if (this.evaluator instanceof ILoggingCustomizable) {
this.logger.info("Setting logger of evaluator {} to {}.be", this.evaluator.getClass().getName(), name);
((ILoggingCustomizable) this.evaluator).setLoggerName(name + ".be");
}
else {
this.logger.info("Evaluator {} cannot be customized for logging, so not configuring its logger.", this.evaluator.getClass().getName());
}
}
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/serialization/ComponentInstanceDeserializer.java
|
package ai.libs.hasco.serialization;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.TreeNode;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
import ai.libs.hasco.model.Component;
import ai.libs.hasco.model.ComponentInstance;
public class ComponentInstanceDeserializer extends StdDeserializer<ComponentInstance> {
/**
*
*/
private static final long serialVersionUID = 4216559441244072999L;
private transient Collection<Component> possibleComponents; // the idea is not to serialize the deserializer, so this can be transient
public ComponentInstanceDeserializer(final Collection<Component> possibleComponents) {
super(ComponentInstance.class);
this.possibleComponents = possibleComponents;
}
public ComponentInstance readFromJson(final String json) throws IOException {
return this.readAsTree(new ObjectMapper().readTree(json));
}
@SuppressWarnings("unchecked")
public ComponentInstance readAsTree(final TreeNode p) throws IOException {
ObjectMapper mapper = new ObjectMapper();
// read the parameter values
Map<String, String> parameterValues = mapper.treeToValue(p.get("params"), HashMap.class);
// read the component
String componentName = p.get("component").toString().replaceAll("\"", "");
Component component = this.possibleComponents.stream().filter(c -> c.getName().equals(componentName)).findFirst()
.orElseThrow(NoSuchElementException::new);
Map<String, ComponentInstance> satisfactionOfRequiredInterfaces = new HashMap<>();
// recursively resolve the requiredInterfaces
TreeNode n = p.get("requiredInterfaces");
Iterator<String> fields = n.fieldNames();
while (fields.hasNext()) {
String key = fields.next();
satisfactionOfRequiredInterfaces.put(key, this.readAsTree(n.get(key)));
}
return new ComponentInstance(component, parameterValues, satisfactionOfRequiredInterfaces);
}
@Override
public ComponentInstance deserialize(final JsonParser p, final DeserializationContext ctxt) throws IOException {
return this.readAsTree(p.readValueAsTree());
}
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/serialization/ComponentLoader.java
|
package ai.libs.hasco.serialization;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.commons.math3.geometry.euclidean.oned.Interval;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import ai.libs.hasco.model.BooleanParameterDomain;
import ai.libs.hasco.model.CategoricalParameterDomain;
import ai.libs.hasco.model.Component;
import ai.libs.hasco.model.Dependency;
import ai.libs.hasco.model.IParameterDomain;
import ai.libs.hasco.model.NumericParameterDomain;
import ai.libs.hasco.model.Parameter;
import ai.libs.hasco.model.ParameterRefinementConfiguration;
import ai.libs.jaicore.basic.FileUtil;
import ai.libs.jaicore.basic.ResourceFile;
import ai.libs.jaicore.basic.ResourceUtil;
import ai.libs.jaicore.basic.sets.Pair;
import ai.libs.jaicore.basic.sets.SetUtil;
public class ComponentLoader {
private static final Logger L = LoggerFactory.getLogger(ComponentLoader.class);
private static final String STR_VALUES = "values";
private static final String STR_DEFAULT = "default";
private static final String MSG_CANNOT_PARSE_LITERAL = "Cannot parse literal ";
private static final String MSG_DOMAIN_NOT_SUPPORTED = "Currently no support for parameters with domain \"";
private final Map<Component, Map<Parameter, ParameterRefinementConfiguration>> paramConfigs = new HashMap<>();
private final Collection<Component> components = new ArrayList<>();
private final Set<String> parsedFiles = new HashSet<>();
private final ObjectMapper objectMapper = new ObjectMapper();
private final Map<String, JsonNode> parameterMap = new HashMap<>();
private final Set<String> uniqueComponentNames = new HashSet<>();
private final Set<String> requiredInterfaces = new HashSet<>();
private final Set<String> providedInterfaces = new HashSet<>();
private final Map<String, JsonNode> componentMap = new HashMap<>();
private final boolean checkRequiredInterfacesResolvable;
public ComponentLoader() {
this(false);
}
public ComponentLoader(final boolean checkRequiredInterfacesResolvable) {
this.checkRequiredInterfacesResolvable = checkRequiredInterfacesResolvable;
}
public ComponentLoader(final File jsonFile) throws IOException {
this();
this.loadComponents(jsonFile);
}
public ComponentLoader(final File jsonFile, final boolean checkRequiredInterfacesResolvable) throws IOException {
this(checkRequiredInterfacesResolvable);
this.loadComponents(jsonFile);
}
private void parseFile(final File jsonFile) throws IOException {
L.debug("Parse file {}...", jsonFile.getAbsolutePath());
String jsonDescription;
if (jsonFile instanceof ResourceFile) {
jsonDescription = ResourceUtil.readResourceFileToString(((ResourceFile) jsonFile).getPathName());
} else {
jsonDescription = FileUtil.readFileAsString(jsonFile);
}
jsonDescription = jsonDescription.replaceAll("/\\*(.*)\\*/", "");
JsonNode rootNode = this.objectMapper.readTree(jsonDescription);
for (JsonNode elem : rootNode.path("parameters")) {
this.parameterMap.put(elem.get("name").asText(), elem);
}
JsonNode includes = rootNode.path("include");
File baseFolder = jsonFile.getParentFile();
for (JsonNode includePathNode : includes) {
String path = includePathNode.asText();
File subFile;
if (baseFolder instanceof ResourceFile) {
subFile = new ResourceFile((ResourceFile) baseFolder, path);
} else {
subFile = new File(baseFolder, path);
}
if (!this.parsedFiles.contains(subFile.getCanonicalPath())) {
this.parsedFiles.add(subFile.getCanonicalPath());
this.parseFile(subFile);
}
}
this.readFromJson(rootNode);
}
public void readFromString(final String json) throws IOException {
ObjectMapper mapper = new ObjectMapper();
this.readFromJson(mapper.readTree(json));
}
private void readFromJson(final JsonNode rootNode) throws IOException {
// get the array of components
JsonNode describedComponents = rootNode.path("components");
if (describedComponents != null) {
Component c;
for (JsonNode component : describedComponents) {
c = new Component(component.get("name").asText());
this.componentMap.put(c.getName(), component);
if (!this.uniqueComponentNames.add(c.getName())) {
throw new IllegalArgumentException("Noticed a component with duplicative component name: " + c.getName());
}
// add provided interfaces
for (JsonNode providedInterface : component.path("providedInterface")) {
c.addProvidedInterface(providedInterface.asText());
}
// add required interfaces
for (JsonNode requiredInterface : component.path("requiredInterface")) {
if (!requiredInterface.has("id")) {
throw new IOException("No id has been specified for a required interface of " + c.getName());
}
if (!requiredInterface.has("name")) {
throw new IOException("No name has been specified for a required interface of " + c.getName());
}
c.addRequiredInterface(requiredInterface.get("id").asText(), requiredInterface.get("name").asText());
}
Map<Parameter, ParameterRefinementConfiguration> paramConfig = new HashMap<>();
for (JsonNode parameter : component.path("parameter")) {
// name of the parameter
String name = parameter.get("name").asText();
// possible string params
String[] stringParams = new String[] { "type", STR_VALUES, STR_DEFAULT };
String[] stringParamValues = new String[stringParams.length];
// possible boolean params
String[] boolParams = new String[] { STR_DEFAULT, "includeExtremals" };
boolean[] boolParamValues = new boolean[boolParams.length];
// possible double params
String[] doubleParams = new String[] { STR_DEFAULT, "min", "max", "refineSplits", "minInterval" };
double[] doubleParamValues = new double[doubleParams.length];
if (this.parameterMap.containsKey(name)) {
JsonNode commonParameter = this.parameterMap.get(name);
// get string parameter values from common parameter
for (int i = 0; i < stringParams.length; i++) {
if (commonParameter.get(stringParams[i]) != null) {
stringParamValues[i] = commonParameter.get(stringParams[i]).asText();
}
}
// get double parameter values from common parameter
for (int i = 0; i < doubleParams.length; i++) {
if (commonParameter.get(doubleParams[i]) != null) {
doubleParamValues[i] = commonParameter.get(doubleParams[i]).asDouble();
}
}
// get boolean parameter values from common parameter
for (int i = 0; i < boolParams.length; i++) {
if (commonParameter.get(boolParams[i]) != null) {
boolParamValues[i] = commonParameter.get(boolParams[i]).asBoolean();
}
}
}
// get string parameter values from current parameter
for (int i = 0; i < stringParams.length; i++) {
if (parameter.get(stringParams[i]) != null) {
stringParamValues[i] = parameter.get(stringParams[i]).asText();
}
}
// get double parameter values from current parameter
for (int i = 0; i < doubleParams.length; i++) {
if (parameter.get(doubleParams[i]) != null) {
doubleParamValues[i] = parameter.get(doubleParams[i]).asDouble();
}
}
// get boolean parameter values from current parameter
for (int i = 0; i < boolParams.length; i++) {
if (parameter.get(boolParams[i]) != null) {
boolParamValues[i] = parameter.get(boolParams[i]).asBoolean();
}
}
Parameter p = null;
String type = stringParamValues[Arrays.stream(stringParams).collect(Collectors.toList()).indexOf("type")];
switch (type) {
case "int":
case "int-log":
case "double":
case "double-log":
p = new Parameter(name, new NumericParameterDomain(type.equals("int") || type.equals("int-log"), doubleParamValues[1], doubleParamValues[2]), doubleParamValues[0]);
if (doubleParamValues[3] == 0) {
throw new IllegalArgumentException("Please specify the parameter \"refineSplits\" for the parameter \"" + p.getName() + "\" in component \"" + c.getName() + "\"");
}
if (doubleParamValues[4] <= 0) {
throw new IllegalArgumentException("Please specify a strictly positive parameter value for \"minInterval\" for the parameter \"" + p.getName() + "\" in component \"" + c.getName() + "\"");
}
if (type.endsWith("-log")) {
paramConfig.put(p, new ParameterRefinementConfiguration(parameter.get("focus").asDouble(), parameter.get("basis").asDouble(), boolParamValues[1], (int) doubleParamValues[3], doubleParamValues[4]));
} else {
paramConfig.put(p, new ParameterRefinementConfiguration(boolParamValues[1], (int) doubleParamValues[3], doubleParamValues[4]));
}
break;
case "bool":
case "boolean":
p = new Parameter(name, new BooleanParameterDomain(), boolParamValues[0]);
break;
case "cat":
if (parameter.get(STR_VALUES) != null && parameter.get(STR_VALUES).isTextual()) {
p = new Parameter(name, new CategoricalParameterDomain(Arrays.stream(stringParamValues[1].split(",")).collect(Collectors.toList())), stringParamValues[2]);
} else {
List<String> values = new LinkedList<>();
if (parameter.get(STR_VALUES) != null) {
for (JsonNode value : parameter.get(STR_VALUES)) {
values.add(value.asText());
}
} else if (this.parameterMap.containsKey(name)) {
for (JsonNode value : this.parameterMap.get(name).get(STR_VALUES)) {
values.add(value.asText());
}
} else {
L.error("Warning: Categorical parameter {} in component {} without value list.", name, c.getName());
}
p = new Parameter(name, new CategoricalParameterDomain(values), stringParamValues[2]);
}
break;
default:
throw new IllegalArgumentException("Unsupported parameter type " + type);
}
if (p != null) {
c.addParameter(p);
}
}
/* now parse dependencies */
for (JsonNode dependency : component.path("dependencies")) {
/* parse precondition */
String pre = dependency.get("pre").asText();
Collection<Collection<Pair<Parameter, IParameterDomain>>> premise = new ArrayList<>();
Collection<String> monoms = Arrays.asList(pre.split("\\|"));
for (String monom : monoms) {
Collection<String> literals = Arrays.asList(monom.split("&"));
Collection<Pair<Parameter, IParameterDomain>> monomInPremise = new ArrayList<>();
for (String literal : literals) {
String[] parts = literal.trim().split(" ");
if (parts.length != 3) {
throw new IllegalArgumentException(MSG_CANNOT_PARSE_LITERAL + literal + ". Literals must be of the form \"<a> P <b>\".");
}
Parameter param = c.getParameterWithName(parts[0]);
String target = parts[2];
switch (parts[1]) {
case "=":
Pair<Parameter, IParameterDomain> eqConditionItem;
if (param.isNumeric()) {
double val = Double.parseDouble(target);
eqConditionItem = new Pair<>(param, new NumericParameterDomain(((NumericParameterDomain) param.getDefaultDomain()).isInteger(), val, val));
} else if (param.isCategorical()) {
eqConditionItem = new Pair<>(param, new CategoricalParameterDomain(new String[] { target }));
} else {
throw new IllegalArgumentException(MSG_DOMAIN_NOT_SUPPORTED + param.getDefaultDomain().getClass().getName() + "\"");
}
monomInPremise.add(eqConditionItem);
break;
case "in":
Pair<Parameter, IParameterDomain> inConditionItem;
if (param.isNumeric()) {
Interval interval = SetUtil.unserializeInterval("[" + target.substring(1, target.length() - 1) + "]");
inConditionItem = new Pair<>(param, new NumericParameterDomain(((NumericParameterDomain) param.getDefaultDomain()).isInteger(), interval.getInf(), interval.getSup()));
} else if (param.isCategorical()) {
if (!target.startsWith("[") && !target.startsWith("{")) {
throw new IllegalArgumentException("Illegal literal \"" + literal + "\" in the postcondition of dependency. This should be a set, but the target is not described by [...] or {...}");
}
Collection<String> values = target.startsWith("[") ? SetUtil.unserializeList(target) : SetUtil.unserializeSet(target);
inConditionItem = new Pair<>(param, new CategoricalParameterDomain(values));
} else {
throw new IllegalArgumentException(MSG_DOMAIN_NOT_SUPPORTED + param.getDefaultDomain().getClass().getName() + "\"");
}
monomInPremise.add(inConditionItem);
break;
default:
throw new IllegalArgumentException(MSG_CANNOT_PARSE_LITERAL + literal + ". Currently no support for predicate \"" + parts[1] + "\".");
}
}
premise.add(monomInPremise);
}
/* parse postcondition */
Collection<Pair<Parameter, IParameterDomain>> conclusion = new ArrayList<>();
String post = dependency.get("post").asText();
Collection<String> literals = Arrays.asList(post.split("&"));
for (String literal : literals) {
String[] parts = literal.trim().split(" ");
if (parts.length < 3) {
throw new IllegalArgumentException(MSG_CANNOT_PARSE_LITERAL + literal + ". Literals must be of the form \"<a> P <b>\".");
}
if (parts.length > 3) {
for (int i = 3; i < parts.length; i++) {
parts[2] += " " + parts[i];
}
}
Parameter param = c.getParameterWithName(parts[0]);
String target = parts[2];
switch (parts[1]) {
case "=":
Pair<Parameter, IParameterDomain> eqConditionItem;
if (param.isNumeric()) {
double val = Double.parseDouble(target);
eqConditionItem = new Pair<>(param, new NumericParameterDomain(((NumericParameterDomain) param.getDefaultDomain()).isInteger(), val, val));
} else if (param.isCategorical()) {
eqConditionItem = new Pair<>(param, new CategoricalParameterDomain(new String[] { target }));
} else {
throw new IllegalArgumentException(MSG_DOMAIN_NOT_SUPPORTED + param.getDefaultDomain().getClass().getName() + "\"");
}
conclusion.add(eqConditionItem);
break;
case "in":
Pair<Parameter, IParameterDomain> inConditionItem;
if (param.isNumeric()) {
Interval interval = SetUtil.unserializeInterval("[" + target.substring(1, target.length() - 1) + "]");
inConditionItem = new Pair<>(param, new NumericParameterDomain(((NumericParameterDomain) param.getDefaultDomain()).isInteger(), interval.getInf(), interval.getSup()));
} else if (param.isCategorical()) {
if (!target.startsWith("[") && !target.startsWith("{")) {
throw new IllegalArgumentException("Illegal literal \"" + literal + "\" in the postcondition of dependency. This should be a set, but the target is not described by [...] or {...}");
}
Collection<String> values = target.startsWith("[") ? SetUtil.unserializeList(target) : SetUtil.unserializeSet(target);
inConditionItem = new Pair<>(param, new CategoricalParameterDomain(values));
} else {
throw new IllegalArgumentException(MSG_DOMAIN_NOT_SUPPORTED + param.getDefaultDomain().getClass().getName() + "\"");
}
conclusion.add(inConditionItem);
break;
default:
throw new IllegalArgumentException(MSG_CANNOT_PARSE_LITERAL + literal + ". Currently no support for predicate \"" + parts[1] + "\".");
}
}
/* add dependency to the component */
c.addDependency(new Dependency(premise, conclusion));
}
this.paramConfigs.put(c, paramConfig);
this.components.add(c);
this.requiredInterfaces.addAll(c.getRequiredInterfaces().values());
this.providedInterfaces.addAll(c.getProvidedInterfaces());
}
}
}
public ComponentLoader loadComponents(final File componentDescriptionFile) throws IOException {
this.paramConfigs.clear();
this.components.clear();
this.uniqueComponentNames.clear();
this.requiredInterfaces.clear();
this.providedInterfaces.clear();
this.parseFile(componentDescriptionFile);
if (this.checkRequiredInterfacesResolvable && !this.getUnresolvableRequiredInterfaces().isEmpty()) {
throw new UnresolvableRequiredInterfaceException();
}
return this;
}
/**
* @return Returns the collection of required interfaces that cannot be resolved by a provided interface.
*/
public Collection<String> getUnresolvableRequiredInterfaces() {
return SetUtil.difference(this.requiredInterfaces, this.providedInterfaces);
}
/**
* @param componentName
* The name of the component.
* @return The JSON node describing the component with the given name, as parsed from the component description file(s).
*/
public JsonNode getComponentAsJsonNode(final String componentName) {
return this.componentMap.get(componentName);
}
/**
* @return The map describing for each component individually how its parameters may be refined.
*/
public Map<Component, Map<Parameter, ParameterRefinementConfiguration>> getParamConfigs() {
return this.paramConfigs;
}
/**
* @return The collection of parsed components.
*/
public Collection<Component> getComponents() {
return this.components;
}
/**
* This method searches for a component with the given name. If such a component does not exist, a NoSuchElementException is thrown.
* @param name The name of the component in question.
* @return The component for the given name.
*/
public Component getComponentWithName(final String name) {
for (Component component : this.getComponents()) {
if (component.getName().equals(name)) {
return component;
}
}
throw new NoSuchElementException("There is no component with the requested name");
}
public static void main(final String[] args) throws IOException {
ComponentLoader cl = new ComponentLoader();
cl.loadComponents(new File("complexMLComponents.json"));
}
public Map<String, JsonNode> getJsonNodeComponents() {
return this.componentMap;
}
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/serialization/ComponentNotFoundException.java
|
package ai.libs.hasco.serialization;
public class ComponentNotFoundException extends Exception {
/**
* Generated by Eclipse
*/
private static final long serialVersionUID = -8112109551741268191L;
public ComponentNotFoundException() {
super();
}
public ComponentNotFoundException(String message) {
super(message);
}
public ComponentNotFoundException(Throwable cause) {
super(cause);
}
public ComponentNotFoundException(String message, Throwable cause) {
super(message, cause);
}
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/serialization/ComponentUtils.java
|
package ai.libs.hasco.serialization;
import java.util.Collection;
import ai.libs.hasco.model.Component;
public class ComponentUtils {
private ComponentUtils() {
/* avoids instantiation */
}
public static Component getComponentByName(final String componentName, final Collection<Component> components) throws ComponentNotFoundException {
for (Component component : components) {
if (component.getName().equals(componentName)) {
return component;
}
}
throw new ComponentNotFoundException("No Component with this name loaded: " + componentName);
}
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/serialization/CompositionSerializer.java
|
package ai.libs.hasco.serialization;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import ai.libs.hasco.model.ComponentInstance;
public class CompositionSerializer {
private CompositionSerializer() {
/* avoids instantiation */
}
public static ObjectNode serializeComponentInstance(final ComponentInstance instance) {
ObjectMapper om = new ObjectMapper();
ObjectNode on = om.createObjectNode();
/* define component and params */
on.put("component", instance.getComponent().getName());
ObjectNode params = om.createObjectNode();
for (String paramName : instance.getParameterValues().keySet()) {
params.put(paramName, instance.getParameterValues().get(paramName));
}
on.set("params", params);
/* define how required interfaces have been resolved */
ObjectNode requiredInterfaces = om.createObjectNode();
for (String requiredInterface : instance.getSatisfactionOfRequiredInterfaces().keySet()) {
requiredInterfaces.set(requiredInterface, serializeComponentInstance(instance.getSatisfactionOfRequiredInterfaces().get(requiredInterface)));
}
on.set("requiredInterfaces", requiredInterfaces);
return on;
}
}
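/*
* Illustrative sketch (editor's addition, not part of the original source): round-tripping a composition through
* JSON using the serializer above together with the ComponentInstanceDeserializer defined earlier in this package.
* "instance" (a ComponentInstance) and "components" (the collection of all known Components) are assumed to exist.
*
* String json = CompositionSerializer.serializeComponentInstance(instance).toString();
* ComponentInstance restored = new ComponentInstanceDeserializer(components).readFromJson(json);
*/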
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/serialization/HASCOJacksonModule.java
|
package ai.libs.hasco.serialization;
import java.util.Collection;
import com.fasterxml.jackson.databind.module.SimpleModule;
import ai.libs.hasco.model.Component;
import ai.libs.hasco.model.ComponentInstance;
public class HASCOJacksonModule extends SimpleModule {
/**
*
*/
private static final long serialVersionUID = 1L;
public HASCOJacksonModule(Collection<Component> components) {
super();
this.addDeserializer(ComponentInstance.class, new ComponentInstanceDeserializer(components));
}
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/serialization/ParameterDeserializer.java
|
package ai.libs.hasco.serialization;
import java.io.IOException;
import java.util.LinkedList;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
import ai.libs.hasco.model.CategoricalParameterDomain;
import ai.libs.hasco.model.IParameterDomain;
import ai.libs.hasco.model.NumericParameterDomain;
import ai.libs.hasco.model.Parameter;
public class ParameterDeserializer extends StdDeserializer<Parameter> {
/**
*
*/
private static final long serialVersionUID = 1L;
public ParameterDeserializer() {
this(null);
}
public ParameterDeserializer(final Class<Parameter> vc) {
super(vc);
}
@Override
public Parameter deserialize(final JsonParser jp, final DeserializationContext ctxt) throws IOException {
JsonNode node = jp.getCodec().readTree(jp);
String name = node.get("name").asText();
boolean numeric = node.get("numeric").asBoolean();
boolean categorical = node.get("categorical").asBoolean();
boolean defaultValue = node.get("defaultValue").asBoolean();
IParameterDomain domain = null;
JsonNode domainNode = node.get("defaultDomain");
if (numeric) {
boolean isInteger = domainNode.get("integer").asBoolean();
double min = domainNode.get("min").asDouble();
double max = domainNode.get("max").asDouble();
domain = new NumericParameterDomain(isInteger, min, max);
} else if (categorical) {
LinkedList<String> values = new LinkedList<>();
JsonNode arrayNode = domainNode.get("values");
if (arrayNode.isArray()) {
for (final JsonNode valueNode : arrayNode) {
values.add(valueNode.asText());
}
}
domain = new CategoricalParameterDomain(values);
}
return new Parameter(name, domain, defaultValue);
}
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/serialization/ParameterDomainDeserializer.java
|
package ai.libs.hasco.serialization;
import java.io.IOException;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
import ai.libs.hasco.model.Dependency;
import ai.libs.hasco.model.IParameterDomain;
import ai.libs.hasco.model.Parameter;
import ai.libs.jaicore.basic.sets.Pair;
public class ParameterDomainDeserializer extends StdDeserializer<Dependency> {
/**
*
*/
private static final long serialVersionUID = -3868917516989468264L;
public ParameterDomainDeserializer() {
this(null);
}
public ParameterDomainDeserializer(final Class<IParameterDomain> vc) {
super(vc);
}
@Override
public Dependency deserialize(final JsonParser p, final DeserializationContext ctxt) throws IOException {
List<Pair<Parameter, IParameterDomain>> collection1 = new LinkedList<>();
LinkedList<Collection<Pair<Parameter, IParameterDomain>>> collection2 = new LinkedList<>();
return new Dependency(collection2, collection1);
}
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/serialization/UnresolvableRequiredInterfaceException.java
|
package ai.libs.hasco.serialization;
/**
* This exception can be thrown if components define required interfaces that cannot be resolved by any of the provided interfaces seen so far.
*
* @author wever
*/
public class UnresolvableRequiredInterfaceException extends RuntimeException {
/**
* Auto-generated version UID for serialization.
*/
private static final long serialVersionUID = -930881442829770230L;
public UnresolvableRequiredInterfaceException() {
super();
}
public UnresolvableRequiredInterfaceException(String msg) {
super(msg);
}
public UnresolvableRequiredInterfaceException(Throwable cause) {
super(cause);
}
public UnresolvableRequiredInterfaceException(String msg, Throwable cause) {
super(msg, cause);
}
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants/forwarddecomposition/DefaultPathPriorizingPredicate.java
|
package ai.libs.hasco.variants.forwarddecomposition;
import java.util.function.Predicate;
import ai.libs.hasco.core.HASCO;
import ai.libs.hasco.core.Util;
import ai.libs.hasco.model.ComponentInstance;
import ai.libs.hasco.model.ComponentUtil;
import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode;
/**
* This predicate holds for nodes encoding (partial) compositions in which every component is configured with its default parameters (and for nodes that do not encode a composition yet). It is used to prioritize such default-configured paths during search.
*
* This introduces a somewhat cyclic dependency: the predicate needs to know the HASCO object it advises, but it is already required when HASCO is initialized. Hence, the hasco variable must be set after HASCO has been constructed (see the sketch below the class).
*
* @author fmohr
*
*/
public class DefaultPathPriorizingPredicate<N, A> implements Predicate<N> {
private HASCO<?, N, A, ?> hasco;
@Override
public boolean test(final N node) {
if (this.hasco == null) {
throw new IllegalStateException("HASCO has not yet been set!");
}
if (!(node instanceof TFDNode)) {
throw new IllegalArgumentException("Currently we only support TFDNodes for node priorization");
}
if (this.hasco.getInput() == null) {
throw new IllegalStateException("HASCO exists, but its problem input has not been defined yet.");
}
ComponentInstance inst = Util.getSolutionCompositionFromState(this.hasco.getInput().getComponents(), ((TFDNode) node).getState(), false);
if (inst == null) {
return true;
}
return ComponentUtil.isDefaultConfiguration(inst);
}
public HASCO<?, N, A, ?> getHasco() {
return this.hasco;
}
public void setHasco(final HASCO<?, N, A, ?> hasco) {
this.hasco = hasco;
}
}
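/*
* Illustrative usage sketch (editor's addition, not part of the original source): wiring the predicate into
* HASCO as described in the class Javadoc. The variable "hasco" and the concrete type arguments are
* assumptions; the essential point is that setHasco is called only after the HASCO instance exists.
*
* DefaultPathPriorizingPredicate<TFDNode, String> prioritizer = new DefaultPathPriorizingPredicate<>();
* HASCO<?, TFDNode, String, Double> hasco = ...; // obtained elsewhere, e.g. from a HASCO factory configured with this predicate
* prioritizer.setHasco(hasco); // resolves the cyclic dependency mentioned above
*/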
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants/forwarddecomposition/HASCOViaFD.java
|
package ai.libs.hasco.variants.forwarddecomposition;
import org.api4.java.ai.graphsearch.problem.IOptimalPathInORGraphSearchFactory;
import ai.libs.hasco.core.DefaultHASCOPlanningReduction;
import ai.libs.hasco.core.HASCO;
import ai.libs.hasco.core.RefinementConfiguredSoftwareConfigurationProblem;
import ai.libs.jaicore.basic.algorithm.reduction.AlgorithmicProblemReduction;
import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.SimpleForwardDecompositionReducer;
import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode;
import ai.libs.jaicore.search.model.other.EvaluatedSearchGraphPath;
import ai.libs.jaicore.search.probleminputs.GraphSearchWithPathEvaluationsInput;
public class HASCOViaFD<I extends GraphSearchWithPathEvaluationsInput<TFDNode, String, V>, V extends Comparable<V>> extends HASCO<I, TFDNode, String, V> {
public HASCOViaFD(final RefinementConfiguredSoftwareConfigurationProblem<V> configurationProblem, final IOptimalPathInORGraphSearchFactory<I, EvaluatedSearchGraphPath<TFDNode, String, V>, TFDNode, String, V, ?> searchFactory,
final AlgorithmicProblemReduction<? super GraphSearchWithPathEvaluationsInput<TFDNode, String, V>, ? super EvaluatedSearchGraphPath<TFDNode, String, V>, I, EvaluatedSearchGraphPath<TFDNode, String, V>> searchProblemTransformer) {
super(configurationProblem, new DefaultHASCOPlanningReduction<>(new SimpleForwardDecompositionReducer()), searchFactory, searchProblemTransformer);
}
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants/forwarddecomposition/HASCOViaFDAndBestFirst.java
|
package ai.libs.hasco.variants.forwarddecomposition;
import org.api4.java.ai.graphsearch.problem.IOptimalPathInORGraphSearchFactory;
import org.api4.java.ai.graphsearch.problem.IPathSearchWithPathEvaluationsInput;
import ai.libs.hasco.core.HASCO;
import ai.libs.hasco.core.RefinementConfiguredSoftwareConfigurationProblem;
import ai.libs.jaicore.basic.algorithm.reduction.AlgorithmicProblemReduction;
import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode;
import ai.libs.jaicore.search.model.other.EvaluatedSearchGraphPath;
import ai.libs.jaicore.search.probleminputs.GraphSearchWithSubpathEvaluationsInput;
public class HASCOViaFDAndBestFirst<V extends Comparable<V>> extends HASCOViaFD<GraphSearchWithSubpathEvaluationsInput<TFDNode, String, V>, V> {
public HASCOViaFDAndBestFirst(final RefinementConfiguredSoftwareConfigurationProblem<V> configurationProblem,
final IOptimalPathInORGraphSearchFactory<GraphSearchWithSubpathEvaluationsInput<TFDNode, String, V>, EvaluatedSearchGraphPath<TFDNode, String, V>, TFDNode, String, V, ?> searchFactory,
final AlgorithmicProblemReduction<IPathSearchWithPathEvaluationsInput<TFDNode, String, V>, EvaluatedSearchGraphPath<TFDNode, String, V>, GraphSearchWithSubpathEvaluationsInput<TFDNode, String, V>, EvaluatedSearchGraphPath<TFDNode, String, V>> searchProblemTransformer) {
super(configurationProblem, searchFactory, searchProblemTransformer);
}
public HASCOViaFDAndBestFirst(final HASCO<GraphSearchWithSubpathEvaluationsInput<TFDNode, String, V>, TFDNode, String, V> hasco) {
super(hasco.getInput(), hasco.getSearchFactory(), hasco.getSearchProblemTransformer());
}
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants/forwarddecomposition/HASCOViaFDAndBestFirstFactory.java
|
package ai.libs.hasco.variants.forwarddecomposition;
import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.IPathEvaluator;
import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode;
import ai.libs.jaicore.search.algorithms.standard.bestfirst.StandardBestFirstFactory;
import ai.libs.jaicore.search.probleminputs.GraphSearchWithSubpathEvaluationsInput;
import ai.libs.jaicore.search.problemtransformers.GraphSearchProblemInputToGraphSearchWithSubpathEvaluationInputTransformer;
/**
* This factory makes it easier to create HASCO objects.
* In contrast to the standard HASCOFactory, it is only necessary to set the problem and a node evaluator (see the sketch below the class).
*
* Note that the standard HASCO search problem is a GraphSearchProblem, but BestFirst needs sub-path evaluation, so providing such a transformation is mandatory.
*
* It is possible to set the node evaluator, which will then be used in the search.
*
* @author fmohr
*
* @param <V>
*/
public class HASCOViaFDAndBestFirstFactory<V extends Comparable<V>> extends HASCOViaFDFactory<GraphSearchWithSubpathEvaluationsInput<TFDNode, String, V>, V> {
public HASCOViaFDAndBestFirstFactory() {
super();
this.setSearchFactory(new StandardBestFirstFactory<>());
}
public HASCOViaFDAndBestFirstFactory(final IPathEvaluator<TFDNode, String, V> nodeEvaluator) {
this();
this.setNodeEvaluator(nodeEvaluator);
}
public void setNodeEvaluator(final IPathEvaluator<TFDNode, String, V> nodeEvaluator) {
this.setSearchProblemTransformer(new GraphSearchProblemInputToGraphSearchWithSubpathEvaluationInputTransformer<>(nodeEvaluator));
}
@Override
public StandardBestFirstFactory<TFDNode, String, V> getSearchFactory() {
return (StandardBestFirstFactory<TFDNode, String, V>)super.getSearchFactory();
}
@Override
@SuppressWarnings("unchecked")
public GraphSearchProblemInputToGraphSearchWithSubpathEvaluationInputTransformer<TFDNode, String, V> getSearchProblemTransformer() {
return (GraphSearchProblemInputToGraphSearchWithSubpathEvaluationInputTransformer<TFDNode, String, V>)super.getSearchProblemTransformer();
}
@Override
public HASCOViaFDAndBestFirst<V> getAlgorithm() {
if (this.getSearchProblemTransformer() == null) {
throw new IllegalStateException("Cannot create HASCO with BestFirst, because no node evaluator has been set. Please set a node evaluator prior to invoking getAlgorithm()");
}
return new HASCOViaFDAndBestFirst<>(super.getAlgorithm());
}
}
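/*
* Illustrative usage sketch (editor's addition, not part of the original source): the minimal configuration
* described in the class Javadoc. "rcProblem" is a hypothetical RefinementConfiguredSoftwareConfigurationProblem<Double>,
* "myNodeEvaluator" a hypothetical IPathEvaluator<TFDNode, String, Double>, and the problem setter is assumed to be
* inherited from the HASCO factory hierarchy (its exact name may differ).
*
* HASCOViaFDAndBestFirstFactory<Double> factory = new HASCOViaFDAndBestFirstFactory<>(myNodeEvaluator);
* factory.setProblemInput(rcProblem); // assumption: problem setter provided by the underlying HASCO factory
* HASCOViaFDAndBestFirst<Double> hasco = factory.getAlgorithm();
*/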
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants/forwarddecomposition/HASCOViaFDAndBestFirstWithDyadRankedNodeQueueFactory.java
|
package ai.libs.hasco.variants.forwarddecomposition;
import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.IPathEvaluator;
import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode;
import ai.libs.jaicore.search.algorithms.standard.bestfirst.DyadRankedBestFirstFactory;
import ai.libs.jaicore.search.algorithms.standard.bestfirst.IBestFirstQueueConfiguration;
import ai.libs.jaicore.search.model.travesaltree.BackPointerPath;
import ai.libs.jaicore.search.probleminputs.GraphSearchWithSubpathEvaluationsInput;
import ai.libs.jaicore.search.problemtransformers.GraphSearchProblemInputToGraphSearchWithSubpathEvaluationInputTransformer;
/**
* HASCO variant factory using best-first search and a dyad-ranked OPEN list.
*
* @author Helena Graf
*
*/
public class HASCOViaFDAndBestFirstWithDyadRankedNodeQueueFactory extends HASCOViaFDAndBestFirstFactory<Double> {
/**
* Constructs a new HASCO factory with a dyad ranked OPEN list configured with
* the given parameters.
*
* @param openConfig the configuration of the dyad-ranked OPEN list used by the best-first search
*/
public HASCOViaFDAndBestFirstWithDyadRankedNodeQueueFactory(final IBestFirstQueueConfiguration<GraphSearchWithSubpathEvaluationsInput<TFDNode, String, Double>, TFDNode, String, Double> openConfig) {
super();
this.setNodeEvaluator(n -> 1.0);
this.setSearchFactory(new DyadRankedBestFirstFactory<>(openConfig));
}
@Override
public void setNodeEvaluator(final IPathEvaluator<TFDNode, String, Double> nodeEvaluator) {
this.setSearchProblemTransformer(new GraphSearchProblemInputToGraphSearchWithSubpathEvaluationInputTransformer<>(n -> {
if (!(n instanceof BackPointerPath)) {
throw new IllegalArgumentException("This variant of HASCO currently only works with back-pointer based nodes.");
}
if (((BackPointerPath<?, ?, ?>) n).isGoal()) {
return nodeEvaluator.evaluate(n);
} else {
return 1.0;
}
}));
}
@Override
public HASCOViaFDAndBestFirst<Double> getAlgorithm() {
HASCOViaFDAndBestFirst<Double> hasco = super.getAlgorithm();
hasco.setCreateComponentInstancesFromNodesInsteadOfPlans(true);
return hasco;
}
}
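/*
* Usage sketch: how this factory might be instantiated, assuming that a suitable
* IBestFirstQueueConfiguration named "openConfig" (e.g. backed by a trained dyad ranker) and a
* problem instance "problem" are already available; both names are illustrative only.
*
*   HASCOViaFDAndBestFirstWithDyadRankedNodeQueueFactory factory =
*       new HASCOViaFDAndBestFirstWithDyadRankedNodeQueueFactory(openConfig);
*   factory.setProblemInput(problem);
*   HASCOViaFDAndBestFirst<Double> hasco = factory.getAlgorithm();
*/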
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants/forwarddecomposition/HASCOViaFDAndBestFirstWithRandomCompletions.java
|
package ai.libs.hasco.variants.forwarddecomposition;
import java.util.function.Predicate;
import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.IPathEvaluator;
import ai.libs.hasco.core.RefinementConfiguredSoftwareConfigurationProblem;
import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode;
import ai.libs.jaicore.search.algorithms.standard.bestfirst.StandardBestFirstFactory;
import ai.libs.jaicore.search.problemtransformers.GraphSearchProblemInputToGraphSearchWithSubpathEvaluationInputTransformerViaRDFS;
public class HASCOViaFDAndBestFirstWithRandomCompletions<V extends Comparable<V>> extends HASCOViaFDAndBestFirst<V> {
public HASCOViaFDAndBestFirstWithRandomCompletions(final RefinementConfiguredSoftwareConfigurationProblem<V> configurationProblem, final int numSamples, final int seed, final int timeoutForSingleCompletionEvaluationInMS,
final int timeoutForNodeEvaluationInMS) {
this(configurationProblem, null, numSamples, seed, timeoutForSingleCompletionEvaluationInMS, timeoutForNodeEvaluationInMS, n -> null);
}
public HASCOViaFDAndBestFirstWithRandomCompletions(final RefinementConfiguredSoftwareConfigurationProblem<V> configurationProblem, final Predicate<TFDNode> prioritingPredicate, final int numSamples, final int seed, final int timeoutForSingleCompletionEvaluationInMS,
final int timeoutForNodeEvaluationInMS, final IPathEvaluator<TFDNode, String, V> preferredNodeEvaluator) {
super(configurationProblem, new StandardBestFirstFactory<>(), new GraphSearchProblemInputToGraphSearchWithSubpathEvaluationInputTransformerViaRDFS<TFDNode, String, V>(preferredNodeEvaluator, prioritingPredicate, seed, numSamples,
timeoutForSingleCompletionEvaluationInMS, timeoutForNodeEvaluationInMS));
}
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants/forwarddecomposition/HASCOViaFDAndBestFirstWithRandomCompletionsFactory.java
|
package ai.libs.hasco.variants.forwarddecomposition;
import java.util.function.Predicate;
import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.IPathEvaluator;
import ai.libs.hasco.core.RefinementConfiguredSoftwareConfigurationProblem;
import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode;
import ai.libs.jaicore.search.algorithms.standard.bestfirst.StandardBestFirstFactory;
import ai.libs.jaicore.search.problemtransformers.GraphSearchProblemInputToGraphSearchWithSubpathEvaluationInputTransformerViaRDFS;
public class HASCOViaFDAndBestFirstWithRandomCompletionsFactory extends HASCOViaFDAndBestFirstFactory<Double> {
private IPathEvaluator<TFDNode, String, Double> preferredNodeEvaluator = n -> null;
private Predicate<TFDNode> priorizingPredicate;
private long seed;
private int numSamples;
private int timeoutForSingleCompletionEvaluationInMS;
private int timeoutForNodeEvaluationInMS;
public HASCOViaFDAndBestFirstWithRandomCompletionsFactory(final long seed, final int numSamples) {
this(seed, numSamples, -1, -1);
}
public HASCOViaFDAndBestFirstWithRandomCompletionsFactory(final long seed, final int numSamples, final int timeoutForSingleCompletionEvaluationInMS, final int timeoutForNodeEvaluationInMS) {
super();
this.seed = seed;
this.numSamples = numSamples;
this.timeoutForSingleCompletionEvaluationInMS = timeoutForSingleCompletionEvaluationInMS;
this.timeoutForNodeEvaluationInMS = timeoutForNodeEvaluationInMS;
}
public Predicate<TFDNode> getPriorizingPredicate() {
return this.priorizingPredicate;
}
public void setPriorizingPredicate(final Predicate<TFDNode> priorizingPredicate) {
this.priorizingPredicate = priorizingPredicate;
}
public IPathEvaluator<TFDNode, String,Double> getPreferredNodeEvaluator() {
return this.preferredNodeEvaluator;
}
public void setPreferredNodeEvaluator(final IPathEvaluator<TFDNode, String, Double> preferredNodeEvaluator) {
this.preferredNodeEvaluator = preferredNodeEvaluator;
}
@Override
public HASCOViaFDAndBestFirst<Double> getAlgorithm() {
return this.getAlgorithm(this.getProblem());
}
@Override
public HASCOViaFDAndBestFirst<Double> getAlgorithm(final RefinementConfiguredSoftwareConfigurationProblem<Double> problem) {
this.setSearchProblemTransformer(new GraphSearchProblemInputToGraphSearchWithSubpathEvaluationInputTransformerViaRDFS<>(this.preferredNodeEvaluator, this.priorizingPredicate, this.seed, this.numSamples,
this.timeoutForSingleCompletionEvaluationInMS, this.timeoutForNodeEvaluationInMS));
this.setSearchFactory(new StandardBestFirstFactory<>());
return new HASCOViaFDAndBestFirst<>(super.getAlgorithm(problem));
}
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants/forwarddecomposition/HASCOViaFDFactory.java
|
package ai.libs.hasco.variants.forwarddecomposition;
import org.api4.java.ai.graphsearch.problem.IOptimalPathInORGraphSearchFactory;
import ai.libs.hasco.core.HASCOFactory;
import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.SimpleForwardDecompositionReducer;
import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode;
import ai.libs.jaicore.search.model.other.EvaluatedSearchGraphPath;
import ai.libs.jaicore.search.probleminputs.GraphSearchWithPathEvaluationsInput;
public class HASCOViaFDFactory<S extends GraphSearchWithPathEvaluationsInput<TFDNode, String, V>, V extends Comparable<V>> extends HASCOFactory<S, TFDNode, String, V> {
public HASCOViaFDFactory() {
super();
this.setPlanningGraphGeneratorDeriver(new SimpleForwardDecompositionReducer());
}
public HASCOViaFDFactory(final IOptimalPathInORGraphSearchFactory<S, EvaluatedSearchGraphPath<TFDNode, String, V>, TFDNode, String, V, ?> searchFactory) {
this();
this.setSearchFactory(searchFactory);
}
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants/forwarddecomposition
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants/forwarddecomposition/twophase/HASCOWithRandomCompletionsConfig.java
|
package ai.libs.hasco.variants.forwarddecomposition.twophase;
import ai.libs.hasco.core.HASCOConfig;
import ai.libs.jaicore.basic.IOwnerBasedRandomizedAlgorithmConfig;
public interface HASCOWithRandomCompletionsConfig extends HASCOConfig, IOwnerBasedRandomizedAlgorithmConfig {
public static final String K_RANDOM_COMPLETIONS_NUM = "hasco.random_completions.num";
public static final String K_RANDOM_COMPLETIONS_TIMEOUT_NODE = "hasco.random_completions.timeout_node";
public static final String K_RANDOM_COMPLETIONS_TIMEOUT_PATH = "hasco.random_completions.timeout_path";
/**
* @return Number of random completions drawn with RDFS.
*/
@Key(K_RANDOM_COMPLETIONS_NUM)
@DefaultValue("3")
public int numberOfRandomCompletions();
/**
* @return Timeout in ms for a node (this is an upper bound for the sum of the evaluations of all randomly drawn candidates).
*/
@Key(K_RANDOM_COMPLETIONS_TIMEOUT_NODE)
@DefaultValue("15000")
public int timeoutForNodeEvaluation();
/**
* @return Timeout in ms for a single evaluation of a solution candidate
*/
@Key(K_RANDOM_COMPLETIONS_TIMEOUT_PATH)
@DefaultValue("15000")
public int timeoutForCandidateEvaluation();
}
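/*
* Example configuration: the keys defined above can be provided via a properties source that is
* bound to this owner-based config interface; the values shown below simply restate the documented
* defaults and are meant as a template only.
*
*   hasco.random_completions.num = 3
*   hasco.random_completions.timeout_node = 15000
*   hasco.random_completions.timeout_path = 15000
*/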
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants/forwarddecomposition
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants/forwarddecomposition/twophase/TwoPhaseHASCO.java
|
package ai.libs.hasco.variants.forwarddecomposition.twophase;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Queue;
import java.util.Random;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import org.aeonbits.owner.ConfigFactory;
import org.api4.java.ai.graphsearch.problem.IPathSearchInput;
import org.api4.java.algorithm.events.IAlgorithmEvent;
import org.api4.java.algorithm.exceptions.AlgorithmException;
import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException;
import org.api4.java.algorithm.exceptions.AlgorithmTimeoutedException;
import org.api4.java.common.attributedobjects.IInformedObjectEvaluatorExtension;
import org.api4.java.common.attributedobjects.IObjectEvaluator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.eventbus.Subscribe;
import ai.libs.hasco.core.HASCO;
import ai.libs.hasco.core.HASCOSolutionCandidate;
import ai.libs.hasco.events.HASCOSolutionEvent;
import ai.libs.hasco.events.TwoPhaseHASCOPhaseSwitchEvent;
import ai.libs.hasco.model.ComponentInstance;
import ai.libs.hasco.optimizingfactory.SoftwareConfigurationAlgorithm;
import ai.libs.hasco.variants.forwarddecomposition.DefaultPathPriorizingPredicate;
import ai.libs.jaicore.basic.algorithm.AlgorithmFinishedEvent;
import ai.libs.jaicore.basic.algorithm.AlgorithmInitializedEvent;
import ai.libs.jaicore.basic.sets.SetUtil;
import ai.libs.jaicore.concurrent.GlobalTimer;
import ai.libs.jaicore.concurrent.NamedTimerTask;
import ai.libs.jaicore.logging.LoggerUtil;
import ai.libs.jaicore.logging.ToJSONStringUtil;
import ai.libs.jaicore.search.probleminputs.GraphSearchWithPathEvaluationsInput;
import ai.libs.jaicore.timing.TimedComputation;
public class TwoPhaseHASCO<S extends GraphSearchWithPathEvaluationsInput<N, A, Double>, N, A> extends SoftwareConfigurationAlgorithm<TwoPhaseSoftwareConfigurationProblem, HASCOSolutionCandidate<Double>, Double> {
private static final String SUFFIX_HASCO = ".hasco";
/* logging */
private Logger logger = LoggerFactory.getLogger(TwoPhaseHASCO.class);
private String loggerName;
/* HASCO configuration */
private HASCO<S, N, A, Double> hasco;
private NamedTimerTask phase1CancellationTask;
/** The solution selected during selection phase. */
private final Queue<HASCOSolutionCandidate<Double>> phase1ResultQueue = new LinkedBlockingQueue<>();
private final Map<HASCOSolutionCandidate<Double>, Double> selectionScoresOfCandidates = new HashMap<>();
private HASCOSolutionCandidate<Double> selectedHASCOSolution;
/* statistics */
private long timeOfStart = -1;
private int secondsSpentInPhase1;
@Override
public String toString() {
Map<String, Object> fields = new HashMap<>();
fields.put("hasco", this.hasco);
fields.put("phase1ResultQueue", this.phase1ResultQueue);
fields.put("selectedHASCOSolution", this.selectedHASCOSolution);
fields.put("timeOfStart", this.timeOfStart);
fields.put("secondsSpentInPhase1", this.secondsSpentInPhase1);
return ToJSONStringUtil.toJSONString(fields);
}
public TwoPhaseHASCO(final TwoPhaseSoftwareConfigurationProblem problem, final TwoPhaseHASCOConfig config) {
super(config != null ? config : ConfigFactory.create(TwoPhaseHASCOConfig.class), problem);
this.logger.info("Created TwoPhaseHASCO object.");
}
public TwoPhaseHASCO(final TwoPhaseSoftwareConfigurationProblem problem, final TwoPhaseHASCOConfig config, final HASCO<S, N, A, Double> hasco) {
this(problem, config);
this.setHasco(hasco);
}
public void setHasco(final HASCO<S, N, A, Double> hasco) {
this.hasco = hasco;
if (this.getLoggerName() != null) {
this.hasco.setLoggerName(this.getLoggerName() + SUFFIX_HASCO);
}
this.hasco.setConfig(this.getConfig());
this.hasco.registerListener(new Object() {
@Subscribe
public void receiveHASCOEvent(final IAlgorithmEvent event) {
/*
* forward the HASCO events and register solutions to update best seen solutions
* and fill up the queue
*/
if (!(event instanceof AlgorithmInitializedEvent || event instanceof AlgorithmFinishedEvent)) {
TwoPhaseHASCO.this.post(event);
}
if (event instanceof HASCOSolutionEvent) {
@SuppressWarnings("unchecked")
HASCOSolutionCandidate<Double> solution = ((HASCOSolutionEvent<Double>) event).getSolutionCandidate();
TwoPhaseHASCO.this.updateBestSeenSolution(solution);
TwoPhaseHASCO.this.logger.info("Received new solution {} with score {} and evaluation time {}ms", solution.getComponentInstance(), solution.getScore(), solution.getTimeToEvaluateCandidate());
TwoPhaseHASCO.this.phase1ResultQueue.add(solution);
}
}
}); // this is to register solutions during runtime
}
@Override
public IAlgorithmEvent nextWithException() throws InterruptedException, AlgorithmTimeoutedException, AlgorithmException, AlgorithmExecutionCanceledException {
this.logger.info("Stepping 2phase HASCO. Current state: {}", this.getState());
switch (this.getState()) {
case CREATED:
if (this.hasco == null) {
throw new IllegalStateException("Cannot start algorithm before HASCO has been set. Please set HASCO either in constructor or via the setter.");
}
this.timeOfStart = System.currentTimeMillis();
AlgorithmInitializedEvent event = this.activate();
this.logger.info(
"Starting 2-Phase HASCO with the following setup:\n\tCPUs:{},\n\tTimeout: {}s\n\tTimeout per node evaluation: {}ms\n\tTimeout per candidate: {}ms\n\tNumber of Random Completions: {}\n\tExpected blow-ups are {} (selection) and {} (post-processing).\nThe search factory is: {}",
this.getNumCPUs(), this.getTimeout().seconds(), this.getConfig().timeoutForNodeEvaluation(), this.getConfig().timeoutForCandidateEvaluation(), this.getConfig().numberOfRandomCompletions(),
this.getConfig().expectedBlowupInSelection(), this.getConfig().expectedBlowupInPostprocessing(), this.hasco.getSearchFactory());
DefaultPathPriorizingPredicate<N, A> prioritizingPredicate = new DefaultPathPriorizingPredicate<>();
/* set HASCO objects within the default path prioritizing node evaluator */
prioritizingPredicate.setHasco(this.hasco);
this.setHASCOLoggerNameIfPossible();
this.logger.info("Initialized HASCO with start time {}.", this.timeOfStart);
return event;
/* active is only one step in this model; this could be refined */
case ACTIVE:
/* phase 1: gather solutions */
if (this.hasco.getTimeout().milliseconds() >= 0) {
GlobalTimer timer = GlobalTimer.getInstance();
this.phase1CancellationTask = new NamedTimerTask() {
@Override
public void exec() {
try {
/* check whether the algorithm has been shutdown, then also cancel this task */
if (TwoPhaseHASCO.this.isShutdownInitialized()) {
this.cancel();
return;
}
/* check termination of phase 1 */
int timeElapsed = (int) (System.currentTimeMillis() - TwoPhaseHASCO.this.timeOfStart);
int timeRemaining = (int) TwoPhaseHASCO.this.hasco.getTimeout().milliseconds() - timeElapsed;
if (timeRemaining < 2000 || TwoPhaseHASCO.this.shouldSearchTerminate(timeRemaining)) {
TwoPhaseHASCO.this.logger.info("Canceling HASCO (first phase). {}ms remaining.", timeRemaining);
TwoPhaseHASCO.this.hasco.cancel();
TwoPhaseHASCO.this.logger.info("HASCO canceled successfully after {}ms", (System.currentTimeMillis() - TwoPhaseHASCO.this.timeOfStart) - timeElapsed);
this.cancel();
}
}
catch (Exception e) {
TwoPhaseHASCO.this.logger.error("Observed {} while checking termination of phase 1. Stack trace is: {}", e.getClass().getName(), Arrays.stream(e.getStackTrace()).map(se -> "\n\t" + se.toString()).collect(Collectors.joining()));
}
}
};
this.phase1CancellationTask.setDescriptor("TwoPhaseHASCO task to check termination of phase 1");
timer.scheduleAtFixedRate(this.phase1CancellationTask, 1000, 1000);
}
this.logger.info("Entering phase 1. Calling HASCO with timeout {}.", this.hasco.getTimeout());
try {
this.hasco.call();
} catch (AlgorithmExecutionCanceledException e) {
this.logger.info("HASCO has terminated due to a cancel.");
if (this.isCanceled()) {
throw new AlgorithmExecutionCanceledException(e.getDelay());
}
} catch (AlgorithmTimeoutedException e) {
this.logger.warn("HASCO has timeouted. In fact, time to deadline is {}ms", this.getTimeout().milliseconds() - (System.currentTimeMillis() - this.timeOfStart));
} finally {
if (this.phase1CancellationTask != null) {
this.phase1CancellationTask.cancel();
}
}
this.secondsSpentInPhase1 = (int) Math.round((System.currentTimeMillis() - this.timeOfStart) / 1000.0);
/* if there is no candidate, and the remaining time is very small, throw an AlgorithmTimeoutedException */
this.logger.info("HASCO has finished. {} solutions were found.", this.phase1ResultQueue.size());
if (this.phase1ResultQueue.isEmpty() && this.getRemainingTimeToDeadline().seconds() < 10) {
this.logger.info("No solution found within phase 1. Throwing an AlgorithmTimeoutedException (This is conventional behavior for when an algorithm has not identified its solution when the timeout bound is hit.)");
this.terminate(); // this sends the AlgorithmFinishedEvent
throw new AlgorithmTimeoutedException(this.getRemainingTimeToDeadline().milliseconds() * -1);
}
/* phase 2: enter phase and set respective logs/events */
IObjectEvaluator<?, Double> selectionBenchmark = this.getInput().getSelectionBenchmark();
if (selectionBenchmark != null) {
if (this.logger.isInfoEnabled()) {
this.logger.info("Entering phase 2.");
this.logger.debug("Solutions seen so far had the following (internal) errors (one per line): {}", this.phase1ResultQueue.stream().map(e -> "\n\t" + e.getScore() + "(" + e.getComponentInstance() + ")").collect(Collectors.joining()));
}
this.post(new TwoPhaseHASCOPhaseSwitchEvent(this));
if (selectionBenchmark instanceof IInformedObjectEvaluatorExtension) {
this.logger.debug("Setting best score for selection phase node evaluator to {}", this.phase1ResultQueue.peek().getScore());
((IInformedObjectEvaluatorExtension<Double>) selectionBenchmark).informAboutBestScore(this.phase1ResultQueue.peek().getScore());
}
this.checkAndConductTermination();
/* phase 2: conduct it (select model) */
this.selectedHASCOSolution = this.selectModel();
}
else {
this.logger.info("Selection phase is disabled. Returning best result of phase 1.");
final Optional<HASCOSolutionCandidate<Double>> bestSolutionOptional = this.phase1ResultQueue.stream().min((s1, s2) -> s1.getScore().compareTo(s2.getScore()));
if (!bestSolutionOptional.isPresent()) {
throw new IllegalStateException("Cannot select a model since phase 1 has not returned any result.");
}
this.selectedHASCOSolution = bestSolutionOptional.get();
}
this.setBestSeenSolution(this.selectedHASCOSolution);
assert this.getBestSeenSolution().equals(this.selectedHASCOSolution);
return this.terminate();
default:
throw new IllegalStateException("Cannot do anything in state " + this.getState());
}
}
protected boolean shouldSearchTerminate(final long timeRemaining) {
Collection<HASCOSolutionCandidate<Double>> currentSelection = this.getSelectionForPhase2();
int estimateForRemainingRuntime = this.getExpectedTotalRemainingRuntimeForAGivenPool(currentSelection, true);
boolean terminatePhase1 = estimateForRemainingRuntime + 5000 > timeRemaining;
this.logger.debug("{}ms of the available time remaining in total, and we estimate a remaining runtime of {}ms. Terminate phase 1: {}", timeRemaining, estimateForRemainingRuntime, terminatePhase1);
return terminatePhase1;
}
public synchronized List<HASCOSolutionCandidate<Double>> getSelectionForPhase2() {
return this.getSelectionForPhase2(Integer.MAX_VALUE);
}
private static final double MAX_MARGIN_FROM_BEST = 0.03;
private synchronized List<HASCOSolutionCandidate<Double>> getSelectionForPhase2(final int remainingTime) {
if (this.getNumberOfConsideredSolutions() < 1) {
throw new UnsupportedOperationException("Cannot determine candidates for phase 2 if their number is set to a value less than 1. Here, it has been set to " + this.getNumberOfConsideredSolutions());
}
/* some initial checks for cases where we do not really have to do anything */
if (remainingTime < 0) {
throw new IllegalArgumentException("Cannot do anything in negative time (" + remainingTime + "ms)");
}
HASCOSolutionCandidate<Double> internallyOptimalSolution = this.getBestSeenSolution();
if (internallyOptimalSolution == null) {
return new ArrayList<>();
}
/* compute k pipeline candidates (the k/2 best, and k/2 random ones that do not deviate too much from the best one) */
double optimalInternalScore = internallyOptimalSolution.getScore();
int bestK = (int) Math.ceil((double) this.getNumberOfConsideredSolutions() / 2);
int randomK = this.getNumberOfConsideredSolutions() - bestK;
Collection<HASCOSolutionCandidate<Double>> potentialCandidates = new ArrayList<>(this.phase1ResultQueue).stream().filter(solution -> solution.getScore() <= optimalInternalScore + MAX_MARGIN_FROM_BEST).collect(Collectors.toList());
this.logger.debug("Computing {} best and {} random solutions for a max runtime of {}. Number of candidates that are at most {} worse than optimum {} is: {}/{}", bestK, randomK, remainingTime, MAX_MARGIN_FROM_BEST,
optimalInternalScore, potentialCandidates.size(), this.phase1ResultQueue.size());
List<HASCOSolutionCandidate<Double>> selectionCandidates = potentialCandidates.stream().limit(bestK).collect(Collectors.toList());
List<HASCOSolutionCandidate<Double>> remainingCandidates = new ArrayList<>(SetUtil.difference(potentialCandidates, selectionCandidates));
Collections.shuffle(remainingCandidates, new Random(this.getConfig().randomSeed()));
selectionCandidates.addAll(remainingCandidates.stream().limit(randomK).collect(Collectors.toList()));
if (this.logger.isTraceEnabled()) {
this.logger.trace("Determined the following candidates for selection phase (in this order): {}", selectionCandidates.stream().map(c -> "\n\t" + c.getScore() + ": " + c.getComponentInstance()).collect(Collectors.joining()));
}
/* if the candidates can be evaluated in the remaining time, return all of them */
int budget = this.getExpectedTotalRemainingRuntimeForAGivenPool(selectionCandidates, true);
if (budget < remainingTime) {
return selectionCandidates;
}
/* otherwise return as much as can be expectedly done in the time */
List<HASCOSolutionCandidate<Double>> actuallySelectedSolutions = new ArrayList<>();
int expectedRuntime;
for (HASCOSolutionCandidate<Double> pl : selectionCandidates) {
actuallySelectedSolutions.add(pl);
expectedRuntime = this.getExpectedTotalRemainingRuntimeForAGivenPool(actuallySelectedSolutions, true);
if (expectedRuntime > remainingTime && actuallySelectedSolutions.size() > 1) {
this.logger.info("Not considering solution {} for phase 2, because the expected runtime of the whole thing would be {}/{}", pl, expectedRuntime, remainingTime);
actuallySelectedSolutions.remove(pl);
}
}
return actuallySelectedSolutions;
}
private int getInSearchEvaluationTimeOfSolutionSet(final Collection<HASCOSolutionCandidate<Double>> solutions) {
return solutions.stream().map(HASCOSolutionCandidate::getTimeToEvaluateCandidate).mapToInt(x -> x).sum();
}
public int getExpectedTotalRemainingRuntimeForAGivenPool(final Collection<HASCOSolutionCandidate<Double>> solutions, final boolean assumeCurrentlyBestCandidateToBeSelected) {
int timeForPhase2 = this.getExpectedRuntimeForPhase2ForAGivenPool(solutions);
int timeForPostprocessing = 0;
if (assumeCurrentlyBestCandidateToBeSelected && this.getBestSeenSolution() != null) {
timeForPostprocessing = this.getPostprocessingTimeOfCurrentlyBest();
} else {
timeForPostprocessing = this.getMaximumPostprocessingTimeOfAnyPoolMember(solutions);
}
return timeForPhase2 + timeForPostprocessing;
}
public int getPostprocessingTimeOfCurrentlyBest() {
return (int) Math.round(this.getBestSeenSolution().getTimeToEvaluateCandidate() * this.getConfig().expectedBlowupInSelection() * this.getConfig().expectedBlowupInPostprocessing());
}
public int getMaximumPostprocessingTimeOfAnyPoolMember(final Collection<HASCOSolutionCandidate<Double>> solutions) {
int max = 0;
for (HASCOSolutionCandidate<Double> candidate : solutions) {
int expectedPostProcessingTime = (int) Math.ceil(candidate.getTimeToEvaluateCandidate() * this.getConfig().expectedBlowupInSelection() * this.getConfig().expectedBlowupInPostprocessing());
max = Math.max(max, expectedPostProcessingTime);
}
return max;
}
public int getExpectedRuntimeForPhase2ForAGivenPool(final Collection<HASCOSolutionCandidate<Double>> solutions) {
int inSearchMCEvalTime = this.getInSearchEvaluationTimeOfSolutionSet(solutions);
int estimateEvaluationTimeForSelectionPhase = (int) (inSearchMCEvalTime * this.getConfig().expectedBlowupInSelection());
int usableCPUs = Math.min(this.getConfig().cpus(), solutions.size());
int runtime = estimateEvaluationTimeForSelectionPhase / Math.max(1, usableCPUs);
this.logger.debug("Expected runtime is {} = {} * {} / {} for a pool of size {}", runtime, inSearchMCEvalTime, this.getConfig().expectedBlowupInSelection(), usableCPUs, solutions.size());
return runtime;
}
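/*
* Worked example for the estimate above: with a summed in-search evaluation time of 12000ms for the
* pool, an expected selection blow-up of 2.0, 4 configured CPUs and at least 4 candidates in the
* pool, the estimated phase-2 runtime is 12000 * 2.0 / 4 = 6000ms (the numbers are illustrative).
*/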
protected HASCOSolutionCandidate<Double> selectModel() throws InterruptedException {
final IObjectEvaluator<ComponentInstance, Double> evaluator = this.getInput().getSelectionBenchmark();
final Optional<HASCOSolutionCandidate<Double>> bestSolutionOptional = this.phase1ResultQueue.stream().min((s1, s2) -> s1.getScore().compareTo(s2.getScore()));
if (!bestSolutionOptional.isPresent()) {
throw new IllegalStateException("Cannot select a model since phase 1 has not returned any result.");
}
HASCOSolutionCandidate<Double> bestSolution = bestSolutionOptional.get();
double scoreOfBestSolution = bestSolution.getScore();
/* determine the models from which we want to select */
this.logger.info("Starting with phase 2: Selection of final model among the {} solutions that were identified.", this.phase1ResultQueue.size());
long startOfPhase2 = System.currentTimeMillis();
List<HASCOSolutionCandidate<Double>> ensembleToSelectFrom;
if (this.getTimeout().seconds() > 0) {
int remainingTime = (int) (this.getTimeout().milliseconds() - (System.currentTimeMillis() - this.timeOfStart));
/*
* check remaining time, otherwise just return the solution with best F-Value.
*/
if (remainingTime < 0) {
this.logger.info("Timelimit is already exhausted, just returning a greedy solution that had internal error {}.", scoreOfBestSolution);
return bestSolution;
}
/* Get a queue of solutions to perform selection evaluation for. */
ensembleToSelectFrom = this.getSelectionForPhase2(remainingTime); // should be ordered by scores already (at
// least the first k)
int expectedTimeForPhase2 = this.getExpectedRuntimeForPhase2ForAGivenPool(ensembleToSelectFrom);
int expectedPostprocessingTime = this.getPostprocessingTimeOfCurrentlyBest();
int expectedMaximumRemainingRuntime = expectedTimeForPhase2 + expectedPostprocessingTime;
remainingTime = (int) (this.getTimeout().milliseconds() - (System.currentTimeMillis() - this.timeOfStart));
if (expectedMaximumRemainingRuntime > remainingTime) {
this.logger.warn("Only {}ms remaining. We probably cannot make it in time.", remainingTime);
}
if (this.logger.isInfoEnabled()) {
this.logger.info(
"We expect phase 2 to consume {}ms for {} candidates, and post-processing is assumed to take at most {}ms, which is a total remaining runtime of {}ms. {}ms are permitted by timeout. The following candidates are considered (one per line with the internal error of phase 1): {}",
expectedTimeForPhase2, ensembleToSelectFrom.size(), expectedPostprocessingTime, expectedMaximumRemainingRuntime, remainingTime, ensembleToSelectFrom.stream().map(e -> "\n\t" + e.getScore() + "(" + e.getComponentInstance() + ")").collect(Collectors.joining()));
}
} else {
ensembleToSelectFrom = this.getSelectionForPhase2();
}
AtomicInteger evaluatorCounter = new AtomicInteger(0);
int threadsForPool = this.getConfig().threads() < 1 ? this.getConfig().cpus() : this.getConfig().threads() - 1; // subtract one thread for the one that is currently active
this.logger.info("Create a thread pool for phase 2 of size {}.", threadsForPool);
ExecutorService pool = Executors.newFixedThreadPool(threadsForPool, r -> {
Thread t = new Thread(r);
t.setName("final-evaluator-" + evaluatorCounter.incrementAndGet());
return t;
});
HASCOSolutionCandidate<Double> selectedModel = bestSolution; // backup solution
final Semaphore sem = new Semaphore(0);
long timestampOfDeadline = this.timeOfStart + this.getTimeout().milliseconds() - 2000;
/* evaluate each candidate */
List<Double> stats = new ArrayList<>();
ensembleToSelectFrom.forEach(c -> stats.add(Double.MAX_VALUE));
int n = ensembleToSelectFrom.size();
AtomicInteger evaluatedModels = new AtomicInteger();
for (int i = 0; i < n; i++) {
HASCOSolutionCandidate<Double> c = ensembleToSelectFrom.get(i);
final int run = i;
pool.submit(() -> {
long timestampStart = System.currentTimeMillis();
/* Time needed to compute the score of this solution in phase 1 */
int inSearchSolutionEvaluationTime = c.getTimeToEvaluateCandidate();
/* We assume linear growth of the evaluation time here to estimate (A) time for
* selection phase, (B) time for post-processing the solution in case it gets selected. */
int estimatedInSelectionSingleIterationEvaluationTime = (int) Math.round(inSearchSolutionEvaluationTime * TwoPhaseHASCO.this.getConfig().expectedBlowupInSelection());
int estimatedPostProcessingTime = (int) Math.round(estimatedInSelectionSingleIterationEvaluationTime * TwoPhaseHASCO.this.getConfig().expectedBlowupInPostprocessing());
int estimatedTotalEffortInCaseOfSelection = estimatedInSelectionSingleIterationEvaluationTime + Math.max(estimatedPostProcessingTime, TwoPhaseHASCO.this.getPostprocessingTimeOfCurrentlyBest());
TwoPhaseHASCO.this.logger.info("Estimating {}ms re-evaluation time and {}ms build time for candidate {} in case of selection (evaluation time during search was {}ms).", estimatedInSelectionSingleIterationEvaluationTime,
estimatedPostProcessingTime, c.getComponentInstance(), inSearchSolutionEvaluationTime);
/* If we have a global timeout, check whether considering this model is feasible. */
if (TwoPhaseHASCO.this.getTimeout().seconds() > 0) {
int remainingTime = (int) (timestampOfDeadline - System.currentTimeMillis());
if (estimatedTotalEffortInCaseOfSelection >= remainingTime) {
TwoPhaseHASCO.this.logger.info(
"Not evaluating solution {} anymore, because its insearch evaluation time was {}, expected evaluation time for selection is {}, and expected post-processing time is {}. This adds up to {}, which exceeds the remaining time of {}!",
c.getComponentInstance(), c.getTimeToEvaluateCandidate(), estimatedInSelectionSingleIterationEvaluationTime, estimatedPostProcessingTime, estimatedTotalEffortInCaseOfSelection, remainingTime);
sem.release();
return;
}
}
/* Schedule a timeout for this evaluation, which is 10% over the estimated time */
int timeoutForEvaluation = (int) Math.max(50, estimatedInSelectionSingleIterationEvaluationTime * (1 + TwoPhaseHASCO.this.getConfig().selectionPhaseTimeoutTolerance()));
try {
this.logger.debug("Starting selection performance computation with timeout {}", timeoutForEvaluation);
TimedComputation.compute(() -> {
double selectionScore = evaluator.evaluate(c.getComponentInstance());
evaluatedModels.incrementAndGet();
long trueEvaluationTime = (System.currentTimeMillis() - timestampStart);
stats.set(run, selectionScore);
this.selectionScoresOfCandidates.put(c, selectionScore);
TwoPhaseHASCO.this.logger.info("Obtained evaluation score of {} after {}ms for candidate {} (score assigned by HASCO was {}).", selectionScore, trueEvaluationTime, c.getComponentInstance(), c.getScore());
return true;
}, timeoutForEvaluation, "Timeout for evaluation of ensemble candidate " + c.getComponentInstance());
} catch (InterruptedException e) {
assert !Thread.currentThread().isInterrupted() : "The interrupted-flag should not be true when an InterruptedException is thrown!";
TwoPhaseHASCO.this.logger.info("Selection eval of {} got interrupted after {}ms. Defined timeout was: {}ms", c.getComponentInstance(), (System.currentTimeMillis() - timestampStart), timeoutForEvaluation);
Thread.currentThread().interrupt(); // no controlled interrupt needed here, because this is only a re-interrupt, and the execution will cease after this anyway
} catch (ExecutionException e) {
TwoPhaseHASCO.this.logger.error("Observed an exeption when trying to evaluate a candidate in the selection phase.\n{}", LoggerUtil.getExceptionInfo(e.getCause()));
} catch (AlgorithmTimeoutedException e) {
TwoPhaseHASCO.this.logger.info("Evaluation of candidate has timed out: {}", c);
} finally {
sem.release();
TwoPhaseHASCO.this.logger.debug("Released. Sem state: {}", sem.availablePermits());
}
});
}
/* now wait for results */
this.logger.info("Waiting for termination of {} computations running on {} threads.", n, this.getConfig().cpus());
sem.acquire(n);
long endOfPhase2 = System.currentTimeMillis();
this.logger.info("Finished phase 2 within {}ms net. Total runtime was {}ms. Evaluated solutions {}/{}", endOfPhase2 - startOfPhase2, endOfPhase2 - this.timeOfStart, evaluatedModels.get(), n);
this.logger.debug("Shutting down thread pool");
pool.shutdownNow();
pool.awaitTermination(5, TimeUnit.SECONDS);
if (!pool.isShutdown()) {
this.logger.warn("Thread pool is not shut down yet!");
}
/* set chosen model */
if (ensembleToSelectFrom.isEmpty()) {
this.logger.warn("No solution contained in ensemble.");
} else {
int selectedModelIndex = this.getCandidateThatWouldCurrentlyBeSelectedWithinPhase2(ensembleToSelectFrom, stats);
if (selectedModelIndex >= 0) {
selectedModel = ensembleToSelectFrom.get(selectedModelIndex);
this.logger.info("Selected a configuration: {}. Its internal score was {}. Selection score was {}", selectedModel.getComponentInstance(), selectedModel.getScore(), stats.get(selectedModelIndex));
} else {
this.logger.warn("Could not select any real solution in selection phase, just returning the best we have seen in HASCO.");
return bestSolution;
}
}
return selectedModel;
}
private synchronized int getCandidateThatWouldCurrentlyBeSelectedWithinPhase2(final List<HASCOSolutionCandidate<Double>> ensembleToSelectFrom, final List<Double> stats) {
int selectedModel = -1;
double best = Double.MAX_VALUE;
for (int i = 0; i < ensembleToSelectFrom.size(); i++) {
double score = stats.get(i);
if (score < best) {
best = score;
selectedModel = i;
}
}
return selectedModel;
}
public HASCO<S, N, A, Double> getHasco() {
return this.hasco;
}
public Queue<HASCOSolutionCandidate<Double>> getPhase1ResultQueue() {
return this.phase1ResultQueue;
}
public int getSecondsSpentInPhase1() {
return this.secondsSpentInPhase1;
}
public Map<HASCOSolutionCandidate<Double>, Double> getSelectionScoresOfCandidates() {
return this.selectionScoresOfCandidates;
}
@Override
public void shutdown() {
this.logger.info("Received shutdown signal. Cancelling phase 1 timer and invoking shutdown on parent.");
if (this.phase1CancellationTask != null) {
this.phase1CancellationTask.cancel();
}
super.shutdown();
}
@Override
public void cancel() {
this.logger.info("Received cancel signal.");
super.cancel();
this.logger.debug("Cancelling HASCO");
if (this.hasco != null) {
this.hasco.cancel();
}
assert this.isCanceled() : "Cancel-flag is not true at the end of the cancel procedure!";
}
/**
* @return The solution candidate selected by TwoPhase HASCO
*/
public HASCOSolutionCandidate<Double> getSelectedSolutionCandidate() {
return this.selectedHASCOSolution;
}
@Override
public TwoPhaseHASCOConfig getConfig() {
return (TwoPhaseHASCOConfig) super.getConfig();
}
/**
* @return The number of considered solutions in the selection phase.
*/
public int getNumberOfConsideredSolutions() {
return this.getConfig().selectionNumConsideredSolutions();
}
/**
* @param numberOfConsideredSolutions The number of considered solutions in the
* selection phase.
*/
public void setNumberOfConsideredSolutions(final int numberOfConsideredSolutions) {
this.getConfig().setProperty(TwoPhaseHASCOConfig.K_SELECTION_NUM_CONSIDERED_SOLUTIONS, numberOfConsideredSolutions + "");
}
public IPathSearchInput<N, A> getGraphSearchInput() {
if (this.hasco == null) {
throw new IllegalStateException("Cannot retrieve GraphGenerator prior to algorithm initialization.");
}
if (this.hasco.getSearch() == null) {
throw new IllegalStateException("Cannot retrieve GraphGenerator prior to algorithm initialization.");
}
return this.hasco.getSearch().getInput();
}
public TwoPhaseHASCOReport getReport() {
return new TwoPhaseHASCOReport(this.phase1ResultQueue.size(), this.secondsSpentInPhase1, this.selectedHASCOSolution);
}
@Override
public String getLoggerName() {
return this.loggerName;
}
@Override
public void setLoggerName(final String name) {
this.loggerName = name;
this.logger.info("Switching logger from {} to {}", this.logger.getName(), name);
this.logger = LoggerFactory.getLogger(name);
this.logger.info("Activated logger {} with name {}", name, this.logger.getName());
this.setHASCOLoggerNameIfPossible();
super.setLoggerName(this.loggerName + "._orgraphsearch");
}
private void setHASCOLoggerNameIfPossible() {
if (this.hasco == null) {
this.logger.info("HASCO object is null, so not setting a logger.");
return;
}
if (this.hasco.getLoggerName() != null && this.hasco.getLoggerName().equals(this.loggerName + SUFFIX_HASCO)) {
this.logger.info("HASCO logger has already been customized correctly, not customizing again.");
return;
}
this.logger.info("Setting logger of {} to {}{}", this.hasco.getId(), this.getLoggerName(), SUFFIX_HASCO);
this.hasco.setLoggerName(this.getLoggerName() + SUFFIX_HASCO);
}
}
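/*
* Usage sketch: a minimal wiring of TwoPhaseHASCO, assuming that a TwoPhaseSoftwareConfigurationProblem
* "problem", a TwoPhaseHASCOConfig "config" and a forward-decomposition HASCO instance "hasco"
* (e.g. obtained from a HASCO factory) are already available; all names are illustrative only.
*
*   TwoPhaseHASCO<GraphSearchWithPathEvaluationsInput<TFDNode, String, Double>, TFDNode, String> twoPhaseHasco =
*       new TwoPhaseHASCO<>(problem, config, hasco);
*   twoPhaseHasco.call();                                    // phase 1 (search) followed by phase 2 (selection)
*   HASCOSolutionCandidate<Double> chosen = twoPhaseHasco.getSelectedSolutionCandidate();
*/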
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants/forwarddecomposition
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants/forwarddecomposition/twophase/TwoPhaseHASCOConfig.java
|
package ai.libs.hasco.variants.forwarddecomposition.twophase;
public interface TwoPhaseHASCOConfig extends HASCOWithRandomCompletionsConfig {
public static final String K_RANDOM_SEED = "hasco.seed";
public static final String K_BLOWUP_SELECTION = "hasco.blowup.selection";
public static final String K_BLOWUP_POSTPROCESS = "hasco.blowup.postprocess";
public static final String K_SELECTION_EVALUATION_TIMEOUT_TOLERANCE = "hasco.selection.timeouttolerance";
public static final String K_SELECTION_NUM_CONSIDERED_SOLUTIONS = "hasco.selection.num_considered_solutions";
/**
* @return The seed for the pseudo randomness generator.
*/
@Key(K_RANDOM_SEED)
@DefaultValue("0")
public int randomSeed();
/**
* @return The number of solutions that are considered during selection phase.
*/
@Key(K_SELECTION_NUM_CONSIDERED_SOLUTIONS)
@DefaultValue("100")
public int selectionNumConsideredSolutions();
/**
* @return Expected multiplication in time for each solution candidate that will be required for evaluation
*/
@Key(K_BLOWUP_SELECTION)
@DefaultValue("NaN")
public double expectedBlowupInSelection();
/**
* @return Expected multiplication in time for each solution candidate that will be required for a postprocessing that should be considered when computing the timeout
*/
@Key(K_BLOWUP_POSTPROCESS)
@DefaultValue("NaN")
public double expectedBlowupInPostprocessing();
/**
* @return The factor by which the evaluation in the selection phase may exceed the time expected on the basis of the estimate given by the blow-up
*/
@Key(K_SELECTION_EVALUATION_TIMEOUT_TOLERANCE)
@DefaultValue("0.1")
public double selectionPhaseTimeoutTolerance();
}
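/*
* Example configuration: note that the two blow-up factors default to NaN and therefore have to be
* set explicitly before the phase-2 timeout arithmetic can work; the concrete values below are
* illustrative only.
*
*   hasco.seed = 0
*   hasco.selection.num_considered_solutions = 100
*   hasco.blowup.selection = 2.0
*   hasco.blowup.postprocess = 1.5
*   hasco.selection.timeouttolerance = 0.1
*/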
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants/forwarddecomposition
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants/forwarddecomposition/twophase/TwoPhaseHASCOFactory.java
|
package ai.libs.hasco.variants.forwarddecomposition.twophase;
import ai.libs.hasco.core.HASCO;
import ai.libs.hasco.core.HASCOFactory;
import ai.libs.hasco.core.HASCOSolutionCandidate;
import ai.libs.hasco.optimizingfactory.SoftwareConfigurationAlgorithmFactory;
import ai.libs.jaicore.search.probleminputs.GraphSearchWithPathEvaluationsInput;
public class TwoPhaseHASCOFactory<S extends GraphSearchWithPathEvaluationsInput<N, A, Double>, N, A> implements SoftwareConfigurationAlgorithmFactory<TwoPhaseSoftwareConfigurationProblem, HASCOSolutionCandidate<Double>, Double, TwoPhaseHASCO<S, N, A>> {
private HASCOFactory<S, N, A, Double> hascoFactory;
private TwoPhaseSoftwareConfigurationProblem problem;
private TwoPhaseHASCOConfig config;
public TwoPhaseHASCOFactory() {
super();
}
public TwoPhaseHASCOFactory(final HASCOFactory<S, N, A, Double> hascoFactory) {
super();
this.hascoFactory = hascoFactory;
}
public HASCOFactory<S, N, A, Double> getHascoFactory() {
return this.hascoFactory;
}
public void setHascoFactory(final HASCOFactory<S, N, A, Double> hascoFactory) {
this.hascoFactory = hascoFactory;
}
public TwoPhaseHASCOConfig getConfig() {
return this.config;
}
public void setConfig(final TwoPhaseHASCOConfig config) {
this.config = config;
}
@Override
public TwoPhaseHASCO<S, N, A> getAlgorithm() {
return this.getAlgorithm(this.problem);
}
@Override
public TwoPhaseHASCO<S, N, A> getAlgorithm(final TwoPhaseSoftwareConfigurationProblem problem) {
this.hascoFactory.setProblemInput(problem);
this.hascoFactory.withAlgorithmConfig(this.config);
HASCO<S, N, A, Double> hasco = this.hascoFactory.getAlgorithm();
return new TwoPhaseHASCO<>(problem, this.config, hasco);
}
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants/forwarddecomposition
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants/forwarddecomposition/twophase/TwoPhaseHASCOReport.java
|
package ai.libs.hasco.variants.forwarddecomposition.twophase;
import ai.libs.hasco.core.HASCOSolutionCandidate;
public class TwoPhaseHASCOReport {
private final int numSolutionsInPhase1;
private final int durationPhase1;
private final HASCOSolutionCandidate<Double> returnedSolution;
public TwoPhaseHASCOReport(int numSolutionsInPhase1, int durationPhase1, HASCOSolutionCandidate<Double> returnedSolution) {
super();
this.numSolutionsInPhase1 = numSolutionsInPhase1;
this.durationPhase1 = durationPhase1;
this.returnedSolution = returnedSolution;
}
public int getNumSolutionsInPhase1() {
return numSolutionsInPhase1;
}
public int getDurationPhase1() {
return durationPhase1;
}
public HASCOSolutionCandidate<Double> getReturnedSolution() {
return returnedSolution;
}
}
|
0
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants/forwarddecomposition
|
java-sources/ai/libs/hasco/0.2.1/ai/libs/hasco/variants/forwarddecomposition/twophase/TwoPhaseSoftwareConfigurationProblem.java
|
package ai.libs.hasco.variants.forwarddecomposition.twophase;
import java.io.File;
import java.io.IOException;
import java.util.Map;
import org.api4.java.common.attributedobjects.IObjectEvaluator;
import ai.libs.hasco.core.RefinementConfiguredSoftwareConfigurationProblem;
import ai.libs.hasco.core.SoftwareConfigurationProblem;
import ai.libs.hasco.model.Component;
import ai.libs.hasco.model.ComponentInstance;
import ai.libs.hasco.model.Parameter;
import ai.libs.hasco.model.ParameterRefinementConfiguration;
public class TwoPhaseSoftwareConfigurationProblem extends RefinementConfiguredSoftwareConfigurationProblem<Double> {
private final IObjectEvaluator<ComponentInstance, Double> selectionBenchmark;
public TwoPhaseSoftwareConfigurationProblem(final File configurationFile, final String requiredInterface, final IObjectEvaluator<ComponentInstance, Double> compositionEvaluator,
final IObjectEvaluator<ComponentInstance, Double> selectionBenchmark) throws IOException {
super(configurationFile, requiredInterface, compositionEvaluator);
this.selectionBenchmark = selectionBenchmark;
}
public TwoPhaseSoftwareConfigurationProblem(final SoftwareConfigurationProblem<Double> coreProblem, final Map<Component, Map<Parameter, ParameterRefinementConfiguration>> paramRefinementConfig,
final IObjectEvaluator<ComponentInstance, Double> selectionBenchmark) {
super(coreProblem, paramRefinementConfig);
this.selectionBenchmark = selectionBenchmark;
}
public IObjectEvaluator<ComponentInstance, Double> getSelectionBenchmark() {
return this.selectionBenchmark;
}
@Override
public int hashCode() {
final int prime = 31;
int result = super.hashCode();
result = prime * result + ((this.selectionBenchmark == null) ? 0 : this.selectionBenchmark.hashCode());
return result;
}
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (!super.equals(obj)) {
return false;
}
if (this.getClass() != obj.getClass()) {
return false;
}
TwoPhaseSoftwareConfigurationProblem other = (TwoPhaseSoftwareConfigurationProblem) obj;
if (this.selectionBenchmark == null) {
if (other.selectionBenchmark != null) {
return false;
}
} else if (!this.selectionBenchmark.equals(other.selectionBenchmark)) {
return false;
}
return true;
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/builder/HASCOBuilder.java
|
package ai.libs.hasco.builder;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import org.aeonbits.owner.ConfigCache;
import org.aeonbits.owner.ConfigFactory;
import org.api4.java.ai.graphsearch.problem.IOptimalPathInORGraphSearchFactory;
import org.api4.java.ai.graphsearch.problem.IPathSearchInput;
import org.api4.java.ai.graphsearch.problem.IPathSearchWithPathEvaluationsInput;
import org.api4.java.algorithm.Timeout;
import org.api4.java.common.attributedobjects.IObjectEvaluator;
import org.api4.java.datastructure.graph.ILabeledPath;
import ai.libs.hasco.builder.forwarddecomposition.HASCOViaFDBuilder;
import ai.libs.hasco.core.HASCO;
import ai.libs.hasco.core.HASCOConfig;
import ai.libs.hasco.core.HASCOSolutionCandidate;
import ai.libs.hasco.core.HASCOUtil;
import ai.libs.hasco.core.reduction.planning2search.DefaultHASCOPlanningReduction;
import ai.libs.hasco.core.reduction.planning2search.IHASCOPlanningReduction;
import ai.libs.jaicore.basic.IOwnerBasedAlgorithmConfig;
import ai.libs.jaicore.components.api.IComponent;
import ai.libs.jaicore.components.api.IComponentInstance;
import ai.libs.jaicore.components.api.INumericParameterRefinementConfigurationMap;
import ai.libs.jaicore.components.api.IRequiredInterfaceDefinition;
import ai.libs.jaicore.components.model.RefinementConfiguredSoftwareConfigurationProblem;
import ai.libs.jaicore.components.model.SoftwareConfigurationProblem;
import ai.libs.jaicore.components.optimizingfactory.SoftwareConfigurationAlgorithmFactory;
import ai.libs.jaicore.planning.core.interfaces.IPlan;
import ai.libs.jaicore.planning.hierarchical.problems.ceocipstn.CEOCIPSTNPlanningProblem;
import ai.libs.jaicore.planning.hierarchical.problems.htn.IHierarchicalPlanningToGraphSearchReduction;
import ai.libs.jaicore.search.model.other.EvaluatedSearchGraphPath;
import ai.libs.jaicore.search.probleminputs.GraphSearchInput;
/**
*
* @author Felix Mohr
*
* @param <N> Type of nodes
* @param <A> Type of arcs
* @param <V> Type of Node scores
* @param <B> Type of the builder (for chaining)
*/
public abstract class HASCOBuilder<N, A, V extends Comparable<V>, B extends HASCOBuilder<N, A, V, B>>
implements SoftwareConfigurationAlgorithmFactory<RefinementConfiguredSoftwareConfigurationProblem<V>, HASCOSolutionCandidate<V>, V, HASCO<N, A, V>> {
public enum Reduction {
FORWARD
}
private final Class<V> scoreClass;
/* problem configuration */
private Collection<IComponent> components;
private String requiredInterface;
private IObjectEvaluator<IComponentInstance, V> evaluator;
private INumericParameterRefinementConfigurationMap paramRefinementConfig;
private RefinementConfiguredSoftwareConfigurationProblem<V> problem;
private IHASCOPlanningReduction<N, A> planningGraphGeneratorDeriver;
private IOptimalPathInORGraphSearchFactory<IPathSearchWithPathEvaluationsInput<N, A, V>, EvaluatedSearchGraphPath<N, A, V>, N, A, V, ?> searchFactory;
private HASCOConfig hascoConfig;
public static HASCOViaFDBuilder<Double, ?> withForwardDecomposition() {
return withForwardDecomposition(Double.class);
}
protected HASCOBuilder(final Class<V> scoreClass) {
this.scoreClass = scoreClass;
this.withDefaultAlgorithmConfig();
}
public static <V extends Comparable<V>> HASCOViaFDBuilder<V, ?> withForwardDecomposition(final Class<V> evaluationType) {
return new HASCOViaFDBuilder<>(evaluationType);
}
public static HASCOViaFDBuilder<Double, ?> get(final Reduction reduction) {
return get(reduction, Double.class);
}
public static <V extends Comparable<V>> HASCOViaFDBuilder<V, ?> get(final Reduction reduction, final Class<V> scoreClass) {
if (reduction == Reduction.FORWARD) {
return withForwardDecomposition(scoreClass);
}
throw new IllegalArgumentException("Currently only support for forward decomposition.");
}
public static HASCOViaFDBuilder<Double, ?> get() {
return get(Reduction.FORWARD);
}
public static HASCOViaFDBuilder<Double, ?> get(final RefinementConfiguredSoftwareConfigurationProblem<Double> problem) {
HASCOViaFDBuilder<Double, ?> builder = get(Reduction.FORWARD);
builder.withProblem(problem);
return builder;
}
protected HASCOBuilder(final HASCOBuilder<N, A, V, ?> builder) {
this(builder.scoreClass);
this.problem = builder.problem;
this.planningGraphGeneratorDeriver = builder.planningGraphGeneratorDeriver;
this.searchFactory = builder.searchFactory;
this.hascoConfig = builder.hascoConfig;
}
@Override
public HASCO<N, A, V> getAlgorithm() {
this.requireThatProblemHasBeenDefined();
return this.getAlgorithm(this.problem);
}
@Override
public HASCO<N, A, V> getAlgorithm(final RefinementConfiguredSoftwareConfigurationProblem<V> problem) {
if (problem.getRequiredInterface() == null || problem.getRequiredInterface().isEmpty()) {
throw new IllegalArgumentException("No required interface defined!");
}
if (this.planningGraphGeneratorDeriver == null) {
throw new IllegalStateException("Cannot create HASCO, because no planningGraphGeneratorDeriver has been specified.");
}
if (this.searchFactory == null) {
throw new IllegalStateException("Cannot create HASCO, because no search factory has been specified.");
}
if (this.hascoConfig == null) {
throw new IllegalStateException("Cannot create HASCO, because no hasco configuration been specified.");
}
return new HASCO<>(this.hascoConfig, problem, this.planningGraphGeneratorDeriver, this.searchFactory);
}
public IHASCOPlanningReduction<N, A> getPlanningGraphGeneratorDeriver() {
return this.planningGraphGeneratorDeriver;
}
public void setProblemInput(final RefinementConfiguredSoftwareConfigurationProblem<V> problemInput) {
for (IComponent c : problemInput.getComponents()) {
for (IRequiredInterfaceDefinition ri : c.getRequiredInterfaces()) {
if (!ri.isOrdered()) {
throw new IllegalArgumentException("HASCO does currently not support non-ordered required-interfaces of components, but required interface \"" + ri.getId() + "\" of component \"" + c.getName() + "\" is not ordered!");
}
}
}
this.problem = problemInput;
}
@SuppressWarnings("unchecked")
public B withPlanningGraphGeneratorDeriver(
final IHierarchicalPlanningToGraphSearchReduction<N, A, ? super CEOCIPSTNPlanningProblem, ? extends IPlan, ? extends GraphSearchInput<N, A>, ? super ILabeledPath<N, A>> planning2searchReduction) {
this.planningGraphGeneratorDeriver = (planning2searchReduction instanceof IHASCOPlanningReduction) ? (IHASCOPlanningReduction<N, A>) planning2searchReduction : new DefaultHASCOPlanningReduction<>(planning2searchReduction);
return this.getSelf();
}
public IOptimalPathInORGraphSearchFactory<IPathSearchWithPathEvaluationsInput<N, A, V>, EvaluatedSearchGraphPath<N, A, V>, N, A, V, ?> getSearchFactory() {
return this.searchFactory;
}
public B withSearchFactory(final IOptimalPathInORGraphSearchFactory<IPathSearchWithPathEvaluationsInput<N, A, V>, EvaluatedSearchGraphPath<N, A, V>, N, A, V, ?> searchFactory) {
this.searchFactory = searchFactory;
return this.getSelf();
}
public B withDefaultAlgorithmConfig() {
this.withAlgorithmConfig(ConfigCache.getOrCreate(HASCOConfig.class));
return this.getSelf();
}
public B withAlgorithmConfig(final HASCOConfig hascoConfig) {
this.hascoConfig = hascoConfig;
return this.getSelf();
}
public B withAlgorithmConfigFile(final File hascoConfigFile) {
this.hascoConfig = (HASCOConfig) ConfigFactory.create(HASCOConfig.class).loadPropertiesFromFile(hascoConfigFile);
return this.getSelf();
}
public HASCOConfig getHascoConfig() {
return this.hascoConfig;
}
public RefinementConfiguredSoftwareConfigurationProblem<V> getProblem() {
return this.problem;
}
public B withProblem(final RefinementConfiguredSoftwareConfigurationProblem<V> problem) {
this.setProblemInput(problem);
this.components = problem.getComponents();
this.evaluator = problem.getCompositionEvaluator();
this.requiredInterface = problem.getRequiredInterface();
this.paramRefinementConfig = problem.getParamRefinementConfig();
return this.getSelf();
}
public B withProblem(final File componentFile, final String requiredInterface, final IObjectEvaluator<IComponentInstance, V> compositionEvaluator) throws IOException {
return this.withProblem(new RefinementConfiguredSoftwareConfigurationProblem<>(componentFile, requiredInterface, compositionEvaluator));
}
public Class<V> getScoreClass() {
return this.scoreClass;
}
protected void requireThatProblemHasBeenDefined() {
if (this.problem == null) {
throw new IllegalStateException("Configuration Problem has not been set!");
}
}
public Collection<IComponent> getComponents() {
return this.components;
}
public B withComponents(final Collection<? extends IComponent> components) {
this.components = new ArrayList<>(components);
this.compileProblemIfPossible();
return this.getSelf();
}
public String getRequiredInterface() {
return this.requiredInterface;
}
public B withRequiredInterface(final String requiredInterface) {
this.requiredInterface = requiredInterface;
this.compileProblemIfPossible();
return this.getSelf();
}
public IObjectEvaluator<IComponentInstance, V> getEvaluator() {
return this.evaluator;
}
public B withEvaluator(final IObjectEvaluator<IComponentInstance, V> evaluator) {
this.evaluator = evaluator;
this.compileProblemIfPossible();
return this.getSelf();
}
public INumericParameterRefinementConfigurationMap getParamRefinementConfig() {
return this.paramRefinementConfig;
}
public B withParamRefinementConfig(final INumericParameterRefinementConfigurationMap paramRefinementConfig) {
this.paramRefinementConfig = paramRefinementConfig;
this.compileProblemIfPossible();
return this.getSelf();
}
private void compileProblemIfPossible() {
if (this.components != null && this.requiredInterface != null && this.paramRefinementConfig != null && this.evaluator != null) {
SoftwareConfigurationProblem<V> coreProblem = new SoftwareConfigurationProblem<>(this.components, this.requiredInterface, this.evaluator);
this.problem = new RefinementConfiguredSoftwareConfigurationProblem<>(coreProblem, this.paramRefinementConfig);
}
}
public IPathSearchInput<N, A> getGraphSearchInput() {
if (this.components == null) {
throw new IllegalStateException("Cannot create graph search input; no components defined yet.");
}
if (this.requiredInterface == null) {
throw new IllegalStateException("Cannot create graph search input; no required interface defined yet.");
}
if (this.paramRefinementConfig == null) {
throw new IllegalStateException("Cannot create graph search input; no param refinement config defined yet.");
}
if (this.planningGraphGeneratorDeriver == null) {
throw new IllegalStateException("Cannot create graph search input; no reduction from planning to graph search defined yet.");
}
return HASCOUtil.getSearchProblem(this.components, this.requiredInterface, this.paramRefinementConfig, this.planningGraphGeneratorDeriver);
}
public B withTimeout(final Timeout to) {
this.hascoConfig.setProperty(IOwnerBasedAlgorithmConfig.K_TIMEOUT, "" + to.milliseconds());
return this.getSelf();
}
public B withCPUs(final int numCPUs) {
this.hascoConfig.setProperty(IOwnerBasedAlgorithmConfig.K_CPUS, "" + numCPUs);
return this.getSelf();
}
@SuppressWarnings("unchecked")
public B getSelf() {
return (B) this;
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/builder/TwoPhaseHASCOBuilder.java
|
package ai.libs.hasco.builder;
import ai.libs.hasco.core.HASCO;
import ai.libs.hasco.core.HASCOSolutionCandidate;
import ai.libs.hasco.twophase.TwoPhaseHASCO;
import ai.libs.hasco.twophase.TwoPhaseHASCOConfig;
import ai.libs.hasco.twophase.TwoPhaseSoftwareConfigurationProblem;
import ai.libs.jaicore.components.optimizingfactory.SoftwareConfigurationAlgorithmFactory;
public class TwoPhaseHASCOBuilder<N, A> implements SoftwareConfigurationAlgorithmFactory<TwoPhaseSoftwareConfigurationProblem, HASCOSolutionCandidate<Double>, Double, TwoPhaseHASCO<N, A>> {
private HASCOBuilder<N, A, Double, ?> hascoFactory;
private TwoPhaseSoftwareConfigurationProblem problem;
private TwoPhaseHASCOConfig config;
public TwoPhaseHASCOBuilder() {
super();
}
public TwoPhaseHASCOBuilder(final HASCOBuilder<N, A, Double, ?> hascoFactory) {
super();
this.hascoFactory = hascoFactory;
}
public HASCOBuilder<N, A, Double, ?> getHascoFactory() {
return this.hascoFactory;
}
public void setHascoFactory(final HASCOBuilder<N, A, Double, ?> hascoFactory) {
this.hascoFactory = hascoFactory;
}
public TwoPhaseHASCOConfig getConfig() {
return this.config;
}
public void setConfig(final TwoPhaseHASCOConfig config) {
this.config = config;
}
@Override
public TwoPhaseHASCO<N, A> getAlgorithm() {
return this.getAlgorithm(this.problem);
}
@Override
public TwoPhaseHASCO<N, A> getAlgorithm(final TwoPhaseSoftwareConfigurationProblem problem) {
this.hascoFactory.setProblemInput(problem);
this.hascoFactory.withAlgorithmConfig(this.config);
HASCO<N, A, Double> hasco = this.hascoFactory.getAlgorithm();
return new TwoPhaseHASCO<>(problem, this.config, hasco);
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/builder
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/builder/forwarddecomposition/DefaultPathPriorizingPredicate.java
|
package ai.libs.hasco.builder.forwarddecomposition;
import java.util.Objects;
import java.util.function.Predicate;
import ai.libs.hasco.core.HASCO;
import ai.libs.hasco.core.HASCOUtil;
import ai.libs.hasco.core.IHascoAware;
import ai.libs.jaicore.components.model.ComponentInstance;
import ai.libs.jaicore.components.model.ComponentInstanceUtil;
import ai.libs.jaicore.logic.fol.structure.Monom;
import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode;
/**
* This is a predicate used to prioritize nodes that encode (partial) compositions in which each component is configured with its default parameters (the search then assigns 0 to such nodes).
*
* This component is somewhat cyclic: it needs to know the HASCO object it will advise, but it is itself already needed to initialize HASCO. Hence, to use it, the hasco variable must be set after initialization.
*
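* A minimal usage sketch (hedged; {@code problem} is an assumed {@code RefinementConfiguredSoftwareConfigurationProblem<Double>} defined elsewhere).
* The random-completions builder installs this predicate via {@code withDefaultParametrizationsFirst()} and sets the HASCO reference itself when the algorithm is built:
*
* <pre>{@code
* HASCOViaFDBuilder<Double, ?> base = new HASCOViaFDBuilder<>(Double.class);
* HASCOViaFDAndBestFirstWithRandomCompletionsBuilder builder = base.withBestFirst().withRandomCompletions();
* builder.withProblem(problem);               // the configuration problem (assumed to be defined elsewhere)
* builder.withDefaultParametrizationsFirst(); // installs a DefaultPathPriorizingPredicate
* HASCOViaFD<Double> hasco = builder.getAlgorithm(); // the builder passes the HASCO reference to the predicate
* }</pre>
*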
* @author fmohr
*
*/
public class DefaultPathPriorizingPredicate<N> implements Predicate<N>, IHascoAware {
private HASCO<?, ?, ?> hasco;
@Override
public boolean test(final N node) {
Objects.requireNonNull(node);
if (this.hasco == null) {
throw new IllegalStateException("HASCO has not yet been set!");
}
if (!(node instanceof TFDNode)) {
throw new IllegalArgumentException("Currently we only support TFDNodes for node priorization");
}
if (this.hasco.getInput() == null) {
throw new IllegalStateException("HASCO exists, but its problem input has not been defined yet.");
}
TFDNode tfd = (TFDNode)node;
Monom stateAfterLastAction = tfd.getState();
/* derive the (partial) composition encoded in the current state; prioritize the node if no composition can be derived yet or if the derived composition is a default configuration */
ComponentInstance inst = HASCOUtil.getSolutionCompositionFromState(this.hasco.getInput().getComponents(), stateAfterLastAction, false);
if (inst == null) {
return true;
}
return ComponentInstanceUtil.isDefaultConfiguration(inst);
}
@Override
public void setHascoReference(final HASCO<?, ?, ?> hasco) {
this.hasco = hasco;
}
@Override
public HASCO<?, ?, ?> getHASCOReference() {
return this.hasco;
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/builder
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/builder/forwarddecomposition/HASCOViaFD.java
|
package ai.libs.hasco.builder.forwarddecomposition;
import org.api4.java.ai.graphsearch.problem.IOptimalPathInORGraphSearchFactory;
import org.api4.java.ai.graphsearch.problem.IPathSearchWithPathEvaluationsInput;
import ai.libs.hasco.core.HASCO;
import ai.libs.hasco.core.reduction.planning2search.DefaultHASCOPlanningReduction;
import ai.libs.jaicore.components.model.RefinementConfiguredSoftwareConfigurationProblem;
import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.SimpleForwardDecompositionReducer;
import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode;
import ai.libs.jaicore.search.model.other.EvaluatedSearchGraphPath;
public class HASCOViaFD<V extends Comparable<V>> extends HASCO<TFDNode, String, V> {
public HASCOViaFD(final RefinementConfiguredSoftwareConfigurationProblem<V> configurationProblem, final IOptimalPathInORGraphSearchFactory<IPathSearchWithPathEvaluationsInput<TFDNode, String, V>, EvaluatedSearchGraphPath<TFDNode, String, V>, TFDNode, String, V, ?> searchFactory) {
super(configurationProblem, new DefaultHASCOPlanningReduction<>(new SimpleForwardDecompositionReducer()), searchFactory);
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/builder
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/builder/forwarddecomposition/HASCOViaFDAndBestFirstBuilder.java
|
package ai.libs.hasco.builder.forwarddecomposition;
import java.util.Objects;
import org.api4.java.ai.graphsearch.problem.IPathSearchInput;
import org.api4.java.ai.graphsearch.problem.IPathSearchWithPathEvaluationsInput;
import ai.libs.hasco.builder.HASCOBuilder;
import ai.libs.jaicore.basic.algorithm.reduction.AlgorithmicProblemReduction;
import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode;
import ai.libs.jaicore.search.algorithms.standard.bestfirst.BestFirstFactory;
import ai.libs.jaicore.search.model.other.EvaluatedSearchGraphPath;
import ai.libs.jaicore.search.probleminputs.GraphSearchWithSubpathEvaluationsInput;
/**
* This factory makes it easier to create HASCO objects. In contrast to the standard HASCOFactory, it is only necessary to set the problem and a node evaluator.
*
* Note that the standard HASCO search problem is a GraphSearchProblem, but BestFirst needs sub-path evaluation, so providing such a transformation is a MUST.
*
* It is possible to set the node evaluator, which will then be used in the search.
*
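* A hedged usage sketch (the variables {@code problem} and {@code subPathReduction} are assumed to be defined elsewhere;
* {@code subPathReduction} is a transformation of the search problem to one with sub-path evaluation, as required by best-first search):
*
* <pre>{@code
* HASCOViaFDBuilder<Double, ?> base = new HASCOViaFDBuilder<>(Double.class);
* HASCOViaFDAndBestFirstBuilder<Double, ?> builder = base.withBestFirst();
* builder.withProblem(problem);            // the configuration problem
* builder.withReduction(subPathReduction); // mandatory transformation to sub-path evaluation
* HASCOViaFD<Double> hasco = builder.getAlgorithm();
* }</pre>
*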
* @author Felix Mohr
*
* @param <V> The node evaluation type (must be Comparable, typically Double)
*/
public class HASCOViaFDAndBestFirstBuilder<V extends Comparable<V>, B extends HASCOViaFDAndBestFirstBuilder<V, B>> extends HASCOViaFDBuilder<V, B> {
private AlgorithmicProblemReduction<IPathSearchInput<TFDNode, String>, EvaluatedSearchGraphPath<TFDNode, String, V>, GraphSearchWithSubpathEvaluationsInput<TFDNode, String, V>, EvaluatedSearchGraphPath<TFDNode, String, V>> reduction;
public HASCOViaFDAndBestFirstBuilder(final HASCOBuilder<TFDNode, String, V, ?> b) {
super(b);
this.withSearchFactory(new BestFirstFactory<>());
}
@SuppressWarnings("unchecked")
@Override
public BestFirstFactory<IPathSearchWithPathEvaluationsInput<TFDNode, String, V>, TFDNode, String, V> getSearchFactory() {
return (BestFirstFactory<IPathSearchWithPathEvaluationsInput<TFDNode, String, V>, TFDNode, String, V>) super.getSearchFactory();
}
public B withReduction(final AlgorithmicProblemReduction<IPathSearchInput<TFDNode, String>, EvaluatedSearchGraphPath<TFDNode, String, V>, GraphSearchWithSubpathEvaluationsInput<TFDNode, String, V>, EvaluatedSearchGraphPath<TFDNode, String, V>> reduction) {
Objects.requireNonNull(reduction);
this.reduction = reduction;
return this.getSelf();
}
@Override
public HASCOViaFD<V> getAlgorithm() {
this.requireThatProblemHasBeenDefined();
if (this.reduction == null) {
throw new IllegalStateException("No reduction defined yet.");
}
BestFirstFactory<IPathSearchWithPathEvaluationsInput<TFDNode, String, V>, TFDNode, String, V> factory = new BestFirstFactory<>();
factory.setReduction(this.reduction);
HASCOViaFD<V> hasco = new HASCOViaFD<>(super.getProblem(), factory);
hasco.setConfig(this.getHascoConfig());
return hasco;
}
@SuppressWarnings("unchecked")
public HASCOViaFDAndBestFirstWithRandomCompletionsBuilder withRandomCompletions() {
if (!this.getScoreClass().equals(Double.class)) {
throw new IllegalStateException("Random completions only applicable for double-typed problems.");
}
return new HASCOViaFDAndBestFirstWithRandomCompletionsBuilder((HASCOViaFDAndBestFirstBuilder<Double, ?>) this);
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/builder
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/builder/forwarddecomposition/HASCOViaFDAndBestFirstWithRandomCompletionsBuilder.java
|
package ai.libs.hasco.builder.forwarddecomposition;
import java.util.Random;
import java.util.function.Predicate;
import org.api4.java.ai.graphsearch.problem.pathsearch.pathevaluation.IPathEvaluator;
import org.api4.java.algorithm.Timeout;
import ai.libs.hasco.builder.HASCOBuilder;
import ai.libs.hasco.core.IHascoAware;
import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode;
import ai.libs.jaicore.search.problemtransformers.GraphSearchProblemInputToGraphSearchWithSubpathEvaluationInputTransformerViaRDFS;
public class HASCOViaFDAndBestFirstWithRandomCompletionsBuilder extends HASCOViaFDAndBestFirstBuilder<Double, HASCOViaFDAndBestFirstWithRandomCompletionsBuilder> {
private IPathEvaluator<TFDNode, String, Double> preferredNodeEvaluator = null;
private Predicate<TFDNode> priorizingPredicate;
private Random random = new Random();
private int numSamples = 10;
private int timeoutForSingleCompletionEvaluationInMS = -1;
private int timeoutForNodeEvaluationInMS = -1;
public HASCOViaFDAndBestFirstWithRandomCompletionsBuilder(final HASCOBuilder<TFDNode, String, Double, ?> builder) {
super(builder);
}
public Predicate<TFDNode> getPriorizingPredicate() {
return this.priorizingPredicate;
}
public HASCOViaFDAndBestFirstWithRandomCompletionsBuilder withPriorizingPredicate(final Predicate<TFDNode> priorizingPredicate) {
this.priorizingPredicate = priorizingPredicate;
return this.getSelf();
}
public HASCOViaFDAndBestFirstWithRandomCompletionsBuilder withRandom(final Random random) {
this.random = random;
return this.getSelf();
}
public HASCOViaFDAndBestFirstWithRandomCompletionsBuilder withSeed(final long seed) {
return this.withRandom(new Random(seed));
}
public HASCOViaFDAndBestFirstWithRandomCompletionsBuilder withNumSamples(final int samples) {
this.numSamples = samples;
return this.getSelf();
}
public HASCOViaFDAndBestFirstWithRandomCompletionsBuilder withTimeoutForNode(final Timeout to) {
this.timeoutForNodeEvaluationInMS = (int)to.milliseconds();
return this.getSelf();
}
public HASCOViaFDAndBestFirstWithRandomCompletionsBuilder withTimeoutForSingleEvaluation(final Timeout to) {
this.timeoutForSingleCompletionEvaluationInMS = (int)to.milliseconds();
return this.getSelf();
}
public IPathEvaluator<TFDNode, String,Double> getPreferredNodeEvaluator() {
return this.preferredNodeEvaluator;
}
public HASCOViaFDAndBestFirstWithRandomCompletionsBuilder withPreferredNodeEvaluator(final IPathEvaluator<TFDNode, String, Double> preferredNodeEvaluator) {
this.preferredNodeEvaluator = preferredNodeEvaluator;
return this.getSelf();
}
public HASCOViaFDAndBestFirstWithRandomCompletionsBuilder withDefaultParametrizationsFirst() {
return this.withPriorizingPredicate(new DefaultPathPriorizingPredicate<>());
}
@Override
public HASCOViaFD<Double> getAlgorithm(){
/* create node evaluator */
this.requireThatProblemHasBeenDefined();
this.withReduction(new GraphSearchProblemInputToGraphSearchWithSubpathEvaluationInputTransformerViaRDFS<>(this.preferredNodeEvaluator, this.priorizingPredicate, this.random, this.numSamples, this.timeoutForSingleCompletionEvaluationInMS, this.timeoutForNodeEvaluationInMS));
/* now get algorithm and tell some of its components about it */
HASCOViaFD<Double> hasco = super.getAlgorithm();
if (this.priorizingPredicate instanceof IHascoAware) {
((IHascoAware)this.priorizingPredicate).setHascoReference(hasco);
}
return hasco;
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/builder
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/builder/forwarddecomposition/HASCOViaFDAndDFSBuilder.java
|
package ai.libs.hasco.builder.forwarddecomposition;
import ai.libs.hasco.builder.HASCOBuilder;
import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode;
import ai.libs.jaicore.search.algorithms.standard.auxilliary.iteratingoptimizer.IteratingGraphSearchOptimizerFactory;
import ai.libs.jaicore.search.algorithms.standard.dfs.DepthFirstSearchFactory;
public class HASCOViaFDAndDFSBuilder<V extends Comparable<V>, B extends HASCOViaFDAndDFSBuilder<V, B>> extends HASCOViaFDBuilder<V, B> {
public HASCOViaFDAndDFSBuilder(final Class<V> scoreClass) {
super(scoreClass);
}
public HASCOViaFDAndDFSBuilder(final HASCOBuilder<TFDNode, String, V, ?> b) {
super(b);
this.withSearchFactory(new IteratingGraphSearchOptimizerFactory<>(new DepthFirstSearchFactory<>()));
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/builder
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/builder/forwarddecomposition/HASCOViaFDBuilder.java
|
package ai.libs.hasco.builder.forwarddecomposition;
import ai.libs.hasco.builder.HASCOBuilder;
import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.SimpleForwardDecompositionReducer;
import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode;
import ai.libs.jaicore.search.problemtransformers.GraphSearchProblemInputToGraphSearchWithSubpathEvaluationViaUninformedness;
public class HASCOViaFDBuilder<V extends Comparable<V>, B extends HASCOViaFDBuilder<V, B>> extends HASCOBuilder<TFDNode, String, V, B> {
public HASCOViaFDBuilder(final Class<V> scoreClass) {
super(scoreClass);
this.withPlanningGraphGeneratorDeriver(new SimpleForwardDecompositionReducer());
}
public HASCOViaFDBuilder(final HASCOBuilder<TFDNode, String, V, ?> b) {
super(b);
this.withPlanningGraphGeneratorDeriver(new SimpleForwardDecompositionReducer());
}
public HASCOViaFDAndDFSBuilder<V, ?> withDFS() {
return new HASCOViaFDAndDFSBuilder<>(this);
}
public HASCOViaFDAndBestFirstBuilder<V, ?> withBestFirst() {
return new HASCOViaFDAndBestFirstBuilder<>(this);
}
public HASCOViaFDAndBestFirstBuilder<Double, ?> withBlindSearch() {
if (!this.getScoreClass().equals(Double.class)) {
throw new IllegalStateException("Blind Best First is only possible for node values of type Double, but is " + this.getScoreClass().getName());
}
@SuppressWarnings("unchecked")
HASCOViaFDAndBestFirstBuilder<Double, ?> builder = (HASCOViaFDAndBestFirstBuilder<Double, ?>)this.withBestFirst();
builder.withReduction(new GraphSearchProblemInputToGraphSearchWithSubpathEvaluationViaUninformedness());
return builder;
}
@Override
public HASCOViaFD<V> getAlgorithm() {
this.requireThatProblemHasBeenDefined();
HASCOViaFD<V> hasco = new HASCOViaFD<>(this.getProblem(), this.getSearchFactory());
hasco.setConfig(this.getHascoConfig());
return hasco;
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core/HASCO.java
|
package ai.libs.hasco.core;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import org.aeonbits.owner.ConfigFactory;
import org.api4.java.ai.graphsearch.problem.IOptimalPathInORGraphSearch;
import org.api4.java.ai.graphsearch.problem.IOptimalPathInORGraphSearchFactory;
import org.api4.java.ai.graphsearch.problem.IPathSearchWithPathEvaluationsInput;
import org.api4.java.algorithm.events.IAlgorithmEvent;
import org.api4.java.algorithm.exceptions.AlgorithmException;
import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException;
import org.api4.java.algorithm.exceptions.AlgorithmTimeoutedException;
import org.api4.java.common.control.ILoggingCustomizable;
import org.api4.java.datastructure.graph.implicit.IGraphGenerator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.eventbus.Subscribe;
import ai.libs.hasco.core.events.HASCOSolutionEvent;
import ai.libs.hasco.core.reduction.planning2search.IHASCOPlanningReduction;
import ai.libs.hasco.core.reduction.softcomp2planning.HASCOReductionSolutionEvaluator;
import ai.libs.jaicore.basic.algorithm.AlgorithmFinishedEvent;
import ai.libs.jaicore.basic.algorithm.AlgorithmInitializedEvent;
import ai.libs.jaicore.components.api.IComponentInstance;
import ai.libs.jaicore.components.model.ComponentInstance;
import ai.libs.jaicore.components.model.ComponentUtil;
import ai.libs.jaicore.components.model.CompositionProblemUtil;
import ai.libs.jaicore.components.model.RefinementConfiguredSoftwareConfigurationProblem;
import ai.libs.jaicore.components.model.UnparametrizedComponentInstance;
import ai.libs.jaicore.components.optimizingfactory.SoftwareConfigurationAlgorithm;
import ai.libs.jaicore.components.serialization.ComponentSerialization;
import ai.libs.jaicore.logging.ToJSONStringUtil;
import ai.libs.jaicore.planning.core.EvaluatedSearchGraphBasedPlan;
import ai.libs.jaicore.planning.core.interfaces.IEvaluatedGraphSearchBasedPlan;
import ai.libs.jaicore.planning.core.interfaces.IPlan;
import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode;
import ai.libs.jaicore.planning.hierarchical.problems.ceocipstn.CEOCIPSTNPlanningProblem;
import ai.libs.jaicore.planning.hierarchical.problems.htn.CostSensitiveHTNPlanningProblem;
import ai.libs.jaicore.search.algorithms.standard.bestfirst.events.EvaluatedSearchSolutionCandidateFoundEvent;
import ai.libs.jaicore.search.model.other.EvaluatedSearchGraphPath;
import ai.libs.jaicore.timing.TimeRecordingObjectEvaluator;
/**
* Hierarchically creates and configures a software composition (a component instance satisfying the required interface) by reducing the configuration problem to HTN planning and then to graph search.
*
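* A hedged usage sketch (HASCO instances are typically obtained via a builder; {@code problem} is an assumed
* {@code RefinementConfiguredSoftwareConfigurationProblem<Double>}, and the checked exceptions of {@code call()} are omitted):
*
* <pre>{@code
* HASCOViaFDBuilder<Double, ?> builder = new HASCOViaFDBuilder<>(Double.class);
* builder.withProblem(problem);
* HASCOViaFD<Double> hasco = builder.withBlindSearch().getAlgorithm(); // uninformed best-first search
* hasco.registerSolutionEventListener(e -> System.out.println("Found solution with score " + e.getScore()));
* HASCOSolutionCandidate<Double> best = hasco.call(); // runs until the search is exhausted, canceled, or timed out
* }</pre>
*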
* @author fmohr, wever
*
* @param <N>
* Type of nodes in the search graph to which the problem is reduced
* @param <A>
* Type of arc labels in the search graph to which the problem is reduced
* @param <V>
* Type of scores of solutions
*/
public class HASCO<N, A, V extends Comparable<V>> extends SoftwareConfigurationAlgorithm<RefinementConfiguredSoftwareConfigurationProblem<V>, HASCOSolutionCandidate<V>, V> {
private Logger logger = LoggerFactory.getLogger(HASCO.class);
private String loggerName; // kept redundantly so that sub-loggers can be configured more easily
/* problem and algorithm setup */
private final IHASCOPlanningReduction<N, A> planning2searchReduction;
private final IOptimalPathInORGraphSearchFactory<IPathSearchWithPathEvaluationsInput<N, A, V>, EvaluatedSearchGraphPath<N, A, V>, N, A, V, ?> searchFactory;
/* working constants of the algorithms */
private final CostSensitiveHTNPlanningProblem<CEOCIPSTNPlanningProblem, V> planningProblem;
private final IPathSearchWithPathEvaluationsInput<N, A, V> searchProblem;
private final IOptimalPathInORGraphSearch<IPathSearchWithPathEvaluationsInput<N, A, V>, EvaluatedSearchGraphPath<N, A, V>, N, A, V> search;
private final List<HASCOSolutionCandidate<V>> listOfAllRecognizedSolutions = new ArrayList<>();
private int numUnparametrizedSolutions = -1;
private final Set<UnparametrizedComponentInstance> returnedUnparametrizedComponentInstances = new HashSet<>();
private Map<EvaluatedSearchSolutionCandidateFoundEvent<N, A, V>, HASCOSolutionEvent<V>> hascoSolutionEventCache = new ConcurrentHashMap<>();
private boolean createComponentInstancesFromNodesInsteadOfPlans = false;
private AtomicBoolean cancelCompleted = new AtomicBoolean();
private final ComponentSerialization serializer = new ComponentSerialization();
/* runtime variables of algorithm */
private final TimeRecordingObjectEvaluator<IComponentInstance, V> timeGrabbingEvaluationWrapper;
public HASCO(final RefinementConfiguredSoftwareConfigurationProblem<V> configurationProblem, final IHASCOPlanningReduction<N, A> planningGraphGeneratorDeriver,
final IOptimalPathInORGraphSearchFactory<IPathSearchWithPathEvaluationsInput<N, A, V>, EvaluatedSearchGraphPath<N, A, V>, N, A, V, ?> searchFactory) {
this(ConfigFactory.create(HASCOConfig.class), configurationProblem, planningGraphGeneratorDeriver, searchFactory);
}
public HASCO(final HASCOConfig algorithmConfig, final RefinementConfiguredSoftwareConfigurationProblem<V> configurationProblem, final IHASCOPlanningReduction<N, A> planning2searchReduction,
final IOptimalPathInORGraphSearchFactory<IPathSearchWithPathEvaluationsInput<N, A, V>, EvaluatedSearchGraphPath<N, A, V>, N, A, V, ?> searchFactory) {
super(algorithmConfig, configurationProblem);
if (configurationProblem == null) {
throw new IllegalArgumentException("Cannot work with configuration problem NULL");
}
if (configurationProblem.getRequiredInterface() == null || configurationProblem.getRequiredInterface().isEmpty()) {
throw new IllegalArgumentException("Not required interface defined in the input");
}
this.planning2searchReduction = planning2searchReduction;
this.searchFactory = searchFactory;
/* check whether there is a component that satisfies the query */
final int numberOfComponentsThatResolveRequest = CompositionProblemUtil.getComponentsThatResolveProblem(configurationProblem).size();
if (numberOfComponentsThatResolveRequest == 0) {
throw new IllegalArgumentException("There is no component that provides the required interface \"" + configurationProblem.getRequiredInterface() + "\"");
}
this.logger.info("Identified {} components that can be used to resolve the query.", numberOfComponentsThatResolveRequest);
/* derive planning problem and search problem */
this.logger.debug("Deriving search problem");
this.planningProblem = HASCOUtil.getPlannigProblem(configurationProblem);
this.searchProblem = HASCOUtil.getSearchProblemWithEvaluation(this.planningProblem, planning2searchReduction);
this.timeGrabbingEvaluationWrapper = ((HASCOReductionSolutionEvaluator<V>)this.planningProblem.getPlanEvaluator()).getTimedEvaluator();
/* create search object */
this.logger.debug("Creating and initializing the search object");
this.search = this.searchFactory.getAlgorithm(this.searchProblem);
}
@Override
public IAlgorithmEvent nextWithException() throws InterruptedException, AlgorithmExecutionCanceledException, AlgorithmTimeoutedException, AlgorithmException {
/* check on termination */
this.logger.trace("Conducting next step in {}.", this.getId());
this.checkAndConductTermination();
this.logger.trace("No stop criteria have caused HASCO to stop up to now. Proceeding ...");
/* act depending on state */
switch (this.getState()) {
case CREATED:
if (this.logger.isInfoEnabled()) {
String reqInterface = this.getInput().getRequiredInterface();
String components = this.getInput().getComponents().stream().map(c -> "\n\t\t [" + (c.getProvidedInterfaces().contains(reqInterface) ? "*" : " ") + "]" + c.toString()).collect(Collectors.joining());
this.logger.info(
"Starting HASCO run. Parametrization:\n\tCPUs: {}\n\tTimeout: {}s\n\tNode evaluator: {}\nProblem:\n\tRequired Interface: {}\n\tComponents: {}\nEnable DEBUG to get an overview of the considered HTN planning problem.",
this.getNumCPUs(), this.getTimeout().seconds(), this.search.getInput().getPathEvaluator(), reqInterface, components);
}
if (this.logger.isDebugEnabled()) {
String operations = this.planningProblem.getCorePlanningProblem().getDomain().getOperations().stream()
.map(o -> "\n\t\t" + o.getName() + "(" + o.getParams() + ")\n\t\t\tPre: " + o.getPrecondition() + "\n\t\t\tAdd List: " + o.getAddLists() + "\n\t\t\tDelete List: " + o.getDeleteLists()).collect(Collectors.joining());
String methods = this.planningProblem.getCorePlanningProblem().getDomain().getMethods().stream().map(m -> "\n\t\t" + m.getName() + "(" + m.getParameters() + ") for task " + m.getTask() + "\n\t\t\tPre: " + m.getPrecondition()
+ "\n\t\t\tPre Eval: " + m.getEvaluablePrecondition() + "\n\t\t\tNetwork: " + m.getNetwork().getLineBasedStringRepresentation()).collect(Collectors.joining());
this.logger.debug("Derived the following HTN planning problem:\n\tOperations:{}\n\tMethods:{}", operations, methods);
}
AlgorithmInitializedEvent event = this.activate();
/* analyze problem */
this.numUnparametrizedSolutions = ComponentUtil.getNumberOfUnparametrizedCompositions(this.getInput().getComponents(), this.getInput().getRequiredInterface());
this.logger.info("Search space contains {} unparametrized solutions.", this.numUnparametrizedSolutions);
/* setup search algorithm */
this.search.setNumCPUs(this.getNumCPUs());
this.search.setTimeout(this.getTimeout());
if (this.loggerName != null && this.loggerName.length() > 0 && this.search instanceof ILoggingCustomizable) {
this.logger.info("Setting logger name of {} to {}.search", this.search.getId(), this.loggerName);
((ILoggingCustomizable) this.search).setLoggerName(this.loggerName + ".search");
} else {
this.logger.info("Not setting the logger name of the search. Logger name of HASCO is {}. Search loggingCustomizable: {}", this.loggerName, (this.search instanceof ILoggingCustomizable));
}
/* register a listener on the search that will forward all events to HASCO's event bus */
this.search.registerListener(new Object() {
@Subscribe
public void receiveSearchEvent(final IAlgorithmEvent event) {
if (!(event instanceof AlgorithmInitializedEvent || event instanceof AlgorithmFinishedEvent)) {
HASCO.this.post(event);
}
}
@Subscribe
public void receiveSolutionCandidateFoundEvent(final EvaluatedSearchSolutionCandidateFoundEvent<N, A, V> solutionEvent) {
HASCO.this.logger.info("Received solution event {}", solutionEvent);
EvaluatedSearchGraphPath<N, A, V> searchPath = solutionEvent.getSolutionCandidate();
IPlan plan = HASCO.this.planning2searchReduction.decodeSolution(searchPath);
ComponentInstance objectInstance;
if (HASCO.this.createComponentInstancesFromNodesInsteadOfPlans) {
objectInstance = HASCOUtil.getSolutionCompositionFromState(HASCO.this.getInput().getComponents(), ((TFDNode) searchPath.getNodes().get(searchPath.getNodes().size() - 1)).getState(), true);
} else {
objectInstance = HASCOUtil.getSolutionCompositionForPlan(HASCO.this.getInput().getComponents(), HASCO.this.planningProblem.getCorePlanningProblem().getInit(), plan, true);
}
HASCO.this.returnedUnparametrizedComponentInstances.add(new UnparametrizedComponentInstance(objectInstance));
V score;
boolean scoreInCache = HASCO.this.timeGrabbingEvaluationWrapper.hasEvaluationForComponentInstance(objectInstance);
if (!scoreInCache) {
throw new IllegalStateException("The time recording object evaluator has no information about component instance " + objectInstance);
}
score = solutionEvent.getSolutionCandidate().getScore();
IEvaluatedGraphSearchBasedPlan<N, A, V> evaluatedPlan = new EvaluatedSearchGraphBasedPlan<>(plan, score, searchPath);
HASCOSolutionCandidate<V> solution = new HASCOSolutionCandidate<>(objectInstance, evaluatedPlan, HASCO.this.timeGrabbingEvaluationWrapper.getEvaluationTimeForComponentInstance(objectInstance));
if (HASCO.this.logger.isInfoEnabled()) {
HASCO.this.logger.info("Received new solution {} with score {} from search, communicating this solution to the HASCO listeners. Number of returned unparametrized solutions is now {}/{}.", HASCO.this.serializer.serialize(solution.getComponentInstance()), score,
HASCO.this.returnedUnparametrizedComponentInstances.size(), HASCO.this.numUnparametrizedSolutions);
}
HASCO.this.updateBestSeenSolution(solution);
HASCO.this.listOfAllRecognizedSolutions.add(solution);
HASCOSolutionEvent<V> hascoSolutionEvent = new HASCOSolutionEvent<>(HASCO.this, solution);
HASCO.this.hascoSolutionEventCache.put(solutionEvent, hascoSolutionEvent);
HASCO.this.post(hascoSolutionEvent);
}
});
/* now initialize the search */
this.logger.debug("Initializing the search");
try {
IAlgorithmEvent searchInitializationEvent = this.search.nextWithException();
assert searchInitializationEvent instanceof AlgorithmInitializedEvent : "The first event emitted by the search was not the initialization event but " + searchInitializationEvent + "!";
this.logger.debug("Search has been initialized.");
this.logger.info("HASCO initialization completed. Starting to search for component instances ...");
return event;
} catch (AlgorithmException e) {
throw new AlgorithmException("HASCO initialization failed.\nOne possible reason is that the graph has no solution.", e);
}
case ACTIVE:
/* step search */
this.logger.debug("Stepping search algorithm.");
IAlgorithmEvent searchEvent = this.search.nextWithException();
this.logger.debug("Search step completed, observed {}.", searchEvent.getClass().getName());
if (searchEvent instanceof AlgorithmFinishedEvent) {
this.logger.info("The search algorithm has finished. Terminating HASCO.");
return this.terminate();
}
/* otherwise, if a solution has been found, we announce this finding to our listeners and memorize if it is a new best candidate */
else if (searchEvent instanceof EvaluatedSearchSolutionCandidateFoundEvent) {
HASCOSolutionEvent<V> hascoSolutionEvent = this.hascoSolutionEventCache.remove(searchEvent);
assert (hascoSolutionEvent != null) : "Hasco solution event has not been seen yet or cannot be retrieved from cache. " + this.hascoSolutionEventCache;
this.logger.info("Returning next solution delivered from search with score {}. Number of found (and returned) unparametrized solutions is now {} out of {} possible.", hascoSolutionEvent.getScore(), this.returnedUnparametrizedComponentInstances.size(),
this.numUnparametrizedSolutions);
return hascoSolutionEvent;
} else {
this.logger.debug("Ignoring irrelevant search event {}", searchEvent);
return searchEvent;
}
default:
throw new IllegalStateException("HASCO cannot do anything in state " + this.getState());
}
}
public IGraphGenerator<N, A> getGraphGenerator() {
return this.searchProblem.getGraphGenerator();
}
public CostSensitiveHTNPlanningProblem<CEOCIPSTNPlanningProblem, V> getPlanningProblem() {
return this.planningProblem;
}
@Override
public void cancel() {
if (this.isCanceled()) {
this.logger.debug("Ignoring cancel, because cancel has been triggered in the past already.");
return;
}
this.logger.info("Received cancel, first processing the cancel locally, then forwarding to search.");
super.cancel();
if (this.search != null) {
this.logger.info("Trigger cancel on search. Thread interruption flag is {}", Thread.currentThread().isInterrupted());
this.search.cancel();
}
this.logger.info("Finished, now terminating. Thread interruption flag is {}", Thread.currentThread().isInterrupted());
this.terminate();
this.logger.info("Cancel completed. Thread interruption flag is {}", Thread.currentThread().isInterrupted());
synchronized (this.cancelCompleted) {
this.cancelCompleted.set(true);
this.cancelCompleted.notifyAll();
}
}
public IHASCOPlanningReduction<N, A> getPlanningGraphGeneratorDeriver() {
return this.planning2searchReduction;
}
public HASCORunReport<V> getReport() {
return new HASCORunReport<>(this.listOfAllRecognizedSolutions);
}
@Override
protected void shutdown() {
if (this.isShutdownInitialized()) {
this.logger.debug("Shutdown has already been initialized, ignoring new shutdown request.");
return;
}
this.logger.info("Entering HASCO shutdown routine.");
super.shutdown();
this.logger.debug("Cancelling search.");
this.search.cancel();
this.logger.debug("Shutdown of HASCO completed.");
}
@Override
public HASCOConfig getConfig() {
return (HASCOConfig) super.getConfig();
}
public IOptimalPathInORGraphSearchFactory<IPathSearchWithPathEvaluationsInput<N, A, V>, EvaluatedSearchGraphPath<N, A, V>, N, A, V, ?> getSearchFactory() {
return this.searchFactory;
}
public IOptimalPathInORGraphSearch<IPathSearchWithPathEvaluationsInput<N, A, V>, EvaluatedSearchGraphPath<N, A, V>, N, A, V> getSearch() {
return this.search;
}
@Override
public String getLoggerName() {
return this.loggerName;
}
@Override
public void setLoggerName(final String name) {
this.logger.info("Switching logger for {} from {} to {}", this.getId(), this.logger.getName(), name);
this.loggerName = name;
this.logger = LoggerFactory.getLogger(name);
this.logger.info("Activated logger for {} with name {}", this.getId(), name);
super.setLoggerName(this.loggerName + "._swConfigAlgo");
/* set logger in planning problem evaluator */
((HASCOReductionSolutionEvaluator<?>)this.planningProblem.getPlanEvaluator()).setLoggerName(name + ".planevaluator");
/* set logger in benchmark */
if (this.getInput().getCompositionEvaluator() instanceof ILoggingCustomizable) {
this.logger.info("Setting logger of HASCO solution evaluator {} to {}.solutionevaluator.", this.getInput().getCompositionEvaluator().getClass().getName(), name);
((ILoggingCustomizable) this.getInput().getCompositionEvaluator()).setLoggerName(name + ".solutionevaluator");
} else {
this.logger.info("The solution evaluator {} does not implement ILoggingCustomizable, so no customization possible.", this.getInput().getCompositionEvaluator().getClass().getName());
}
}
public void setCreateComponentInstancesFromNodesInsteadOfPlans(final boolean cIsFromNodes) {
this.createComponentInstancesFromNodesInsteadOfPlans = cIsFromNodes;
}
@Override
public String toString() {
Map<String, Object> fields = new HashMap<>();
fields.put("planningGraphGeneratorDeriver", this.planning2searchReduction);
fields.put("planningProblem", this.planningProblem);
fields.put("search", this.search);
fields.put("searchProblem", this.searchProblem);
return ToJSONStringUtil.toJSONString(this.getClass().getSimpleName(), fields);
}
public void registerSolutionEventListener(final Consumer<HASCOSolutionEvent<V>> listener) {
this.registerListener(new Object() {
@Subscribe
public void receiveSolutionEvent(final HASCOSolutionEvent<V> e) {
listener.accept(e);
}
});
}
public AtomicBoolean getCancelCompleted() {
return this.cancelCompleted;
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core/HASCOConfig.java
|
package ai.libs.hasco.core;
import org.aeonbits.owner.Config.Sources;
import ai.libs.jaicore.basic.IOwnerBasedAlgorithmConfig;
@Sources({ "file:conf/hasco.properties" })
public interface HASCOConfig extends IOwnerBasedAlgorithmConfig {
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core/HASCOSolutionCandidate.java
|
package ai.libs.hasco.core;
import ai.libs.jaicore.components.api.IComponentInstance;
import ai.libs.jaicore.components.api.IEvaluatedSoftwareConfigurationSolution;
import ai.libs.jaicore.planning.core.interfaces.IEvaluatedGraphSearchBasedPlan;
/**
* This is a wrapper class only used for efficient processing of solutions. For example, to look up the annotations of a solution, we do not need the possibly costly equals method of the solution object but only this
* class. For each solution, only one such object is created.
*
* @author fmohr
*
* @param <V> the type with which solution candidates are scored
*/
public class HASCOSolutionCandidate<V extends Comparable<V>> implements IEvaluatedSoftwareConfigurationSolution<V> {
private final IComponentInstance componentInstance;
private final IEvaluatedGraphSearchBasedPlan<?, ?, V> planningSolution;
private final int timeToEvaluateCandidate;
private final long timeOfCreation = System.currentTimeMillis();
public HASCOSolutionCandidate(final IComponentInstance componentInstance, final IEvaluatedGraphSearchBasedPlan<?, ?, V> planningSolution, final int timeToEvaluateCandidate) {
super();
this.componentInstance = componentInstance;
this.planningSolution = planningSolution;
this.timeToEvaluateCandidate = timeToEvaluateCandidate;
if (planningSolution == null) {
throw new IllegalArgumentException("HASCOSolutionCandidate cannot be created with a NULL planning solution.");
}
if (planningSolution.getSearchGraphPath() == null) {
throw new IllegalArgumentException("HASCOSolutionCandidate cannot be created with a planning solution that has a NULL path object.");
}
}
@Override
public IComponentInstance getComponentInstance() {
return this.componentInstance;
}
@Override
public V getScore() {
return this.planningSolution.getScore();
}
public int getTimeToEvaluateCandidate() {
return this.timeToEvaluateCandidate;
}
public long getTimeOfCreation() {
return this.timeOfCreation;
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core/HASCOUtil.java
|
package ai.libs.hasco.core;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.stream.Collectors;
import org.apache.commons.math3.geometry.euclidean.oned.Interval;
import org.apache.commons.math3.geometry.partitioning.Region.Location;
import org.api4.java.ai.graphsearch.problem.IPathSearchInput;
import org.api4.java.ai.graphsearch.problem.IPathSearchWithPathEvaluationsInput;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ai.libs.hasco.core.reduction.planning2search.IHASCOPlanningReduction;
import ai.libs.hasco.core.reduction.softcomp2planning.HASCOReduction;
import ai.libs.jaicore.basic.sets.Pair;
import ai.libs.jaicore.basic.sets.SetUtil;
import ai.libs.jaicore.components.api.IComponent;
import ai.libs.jaicore.components.api.IComponentInstance;
import ai.libs.jaicore.components.api.INumericParameterRefinementConfigurationMap;
import ai.libs.jaicore.components.api.IParameter;
import ai.libs.jaicore.components.api.IParameterDependency;
import ai.libs.jaicore.components.api.IParameterDomain;
import ai.libs.jaicore.components.api.IRequiredInterfaceDefinition;
import ai.libs.jaicore.components.model.CategoricalParameterDomain;
import ai.libs.jaicore.components.model.ComponentInstance;
import ai.libs.jaicore.components.model.ComponentUtil;
import ai.libs.jaicore.components.model.NumericParameterDomain;
import ai.libs.jaicore.components.model.RefinementConfiguredSoftwareConfigurationProblem;
import ai.libs.jaicore.components.model.SoftwareConfigurationProblem;
import ai.libs.jaicore.logic.fol.structure.Literal;
import ai.libs.jaicore.logic.fol.structure.LiteralParam;
import ai.libs.jaicore.logic.fol.structure.Monom;
import ai.libs.jaicore.planning.classical.algorithms.strips.forward.StripsUtil;
import ai.libs.jaicore.planning.core.Action;
import ai.libs.jaicore.planning.core.interfaces.IPlan;
import ai.libs.jaicore.planning.hierarchical.problems.ceocipstn.CEOCIPSTNPlanningProblem;
import ai.libs.jaicore.planning.hierarchical.problems.htn.CostSensitiveHTNPlanningProblem;
import ai.libs.jaicore.planning.hierarchical.problems.htn.CostSensitivePlanningToStandardSearchProblemReduction;
import ai.libs.jaicore.search.model.other.SearchGraphPath;
import ai.libs.jaicore.search.model.travesaltree.BackPointerPath;
/**
* Utility functions in the context of HASCO algorithm selection and configuration.
*
* @author Felix Mohr
*
*/
public class HASCOUtil {
private static final String LITERAL_RESOLVES = "resolves";
private static final String LITERAL_PARAMCONTAINER = "parameterContainer";
private static final String LITERAL_VAL = "val";
private static final String LITERAL_INTERFACEGROUPDEFINITION = "interfaceGroup"; // [component name, interface ID (role), component handle, iGroupHandle]
private static final String LITERAL_INTERFACEMEMBERSHIP = "interfaceMember"; // [iHandle, iGroupHandle]
private static final Logger logger = LoggerFactory.getLogger(HASCOUtil.class);
private HASCOUtil() {
/* avoid instantiation */
}
public static int getNumberOfUnparametrizedSolutions(final SoftwareConfigurationProblem<?> problem) {
return ComponentUtil.getNumberOfUnparametrizedCompositions(problem.getComponents(), problem.getRequiredInterface());
}
public static <N, A> IPathSearchInput<N, A> getSearchProblem(final Collection<? extends IComponent> components, final String requiredInterface, final INumericParameterRefinementConfigurationMap paramRefinementConfig,
final IHASCOPlanningReduction<N, A> plan2searchReduction) {
HASCOReduction<Double> hascoReduction = new HASCOReduction<>();
SoftwareConfigurationProblem<Double> coreProblem = new SoftwareConfigurationProblem<>(components, requiredInterface, n -> 0.0);
RefinementConfiguredSoftwareConfigurationProblem<Double> problem = new RefinementConfiguredSoftwareConfigurationProblem<>(coreProblem, paramRefinementConfig);
CostSensitiveHTNPlanningProblem<CEOCIPSTNPlanningProblem, Double> planningProblem = hascoReduction.encodeProblem(problem);
return new CostSensitivePlanningToStandardSearchProblemReduction<CEOCIPSTNPlanningProblem, N, A, Double>(plan2searchReduction).encodeProblem(planningProblem);
}
public static <V extends Comparable<V>> CostSensitiveHTNPlanningProblem<CEOCIPSTNPlanningProblem, V> getPlannigProblem(final RefinementConfiguredSoftwareConfigurationProblem<V> problem) {
HASCOReduction<V> hascoReduction = new HASCOReduction<>();
return hascoReduction.encodeProblem(problem);
}
public static <N, A, V extends Comparable<V>> IPathSearchWithPathEvaluationsInput<N, A, V> getSearchProblemWithEvaluation(final RefinementConfiguredSoftwareConfigurationProblem<V> problem,
final IHASCOPlanningReduction<N, A> plan2searchReduction) {
return getSearchProblemWithEvaluation(getPlannigProblem(problem), plan2searchReduction);
}
public static <N, A, V extends Comparable<V>> IPathSearchWithPathEvaluationsInput<N, A, V> getSearchProblemWithEvaluation(final CostSensitiveHTNPlanningProblem<CEOCIPSTNPlanningProblem, V> planningProblem,
final IHASCOPlanningReduction<N, A> plan2searchReduction) {
return new CostSensitivePlanningToStandardSearchProblemReduction<CEOCIPSTNPlanningProblem, N, A, V>(plan2searchReduction).encodeProblem(planningProblem);
}
/**
* Determines, for the object with the given name, which data container in the given state holds the value of each of the object's parameters.
*
* @param state the planning state to inspect
* @param objectName the name of the component instance object in the state
* @return a map from parameter names to the names of the containers holding their values
*/
public static Map<String, String> getParameterContainerMap(final Monom state, final String objectName) {
Map<String, String> parameterContainerMap = new HashMap<>();
List<Literal> containerLiterals = state.stream().filter(l -> l.getPropertyName().equals(LITERAL_PARAMCONTAINER) && l.getParameters().get(2).getName().equals(objectName)).collect(Collectors.toList());
containerLiterals.forEach(l -> parameterContainerMap.put(l.getParameters().get(1).getName(), l.getParameters().get(3).getName()));
return parameterContainerMap;
}
public static Map<ComponentInstance, Map<IParameter, String>> getParametrizations(final Monom state, final Collection<? extends IComponent> components, final boolean resolveIntervals) {
Map<String, ComponentInstance> objectMap = new HashMap<>();
Map<String, Map<String, String>> parameterContainerMap = new HashMap<>(); // stores for each object the name of the container of each parameter
Map<String, String> parameterValues = new HashMap<>();
Map<ComponentInstance, Map<IParameter, String>> parameterValuesPerComponentInstance = new HashMap<>();
Collection<String> overwrittenDataContainers = getOverwrittenDatacontainersInState(state);
/*
* create (empty) component instances, detect containers for parameter values, and register the
* values of the data containers
*/
for (Literal l : state) {
String[] params = l.getParameters().stream().map(LiteralParam::getName).collect(Collectors.toList()).toArray(new String[] {});
switch (l.getPropertyName()) {
case LITERAL_RESOLVES: // field 0 and 1 (parent object name and interface name) are ignored here
String componentName = params[2];
String objectName = params[3];
Optional<? extends IComponent> component = components.stream().filter(c -> c.getName().equals(componentName)).findAny();
assert component.isPresent() : "Could not find component with name " + componentName;
ComponentInstance object = new ComponentInstance(component.get(), new HashMap<>(), new HashMap<>());
objectMap.put(objectName, object);
break;
case LITERAL_PARAMCONTAINER:
if (!parameterContainerMap.containsKey(params[2])) {
parameterContainerMap.put(params[2], new HashMap<>());
}
parameterContainerMap.get(params[2]).put(params[1], params[3]);
break;
case LITERAL_VAL:
if (overwrittenDataContainers.contains(params[0])) {
parameterValues.put(params[0], params[1]);
}
break;
default:
/* simply ignore other literals */
break;
}
}
/* update the configurations of the objects */
for (Entry<String, ComponentInstance> entry : objectMap.entrySet()) {
Map<IParameter, String> paramValuesForThisComponent = new HashMap<>();
String objectName = entry.getKey();
ComponentInstance object = entry.getValue();
parameterValuesPerComponentInstance.put(object, paramValuesForThisComponent);
for (IParameter p : object.getComponent().getParameters()) {
assert parameterContainerMap.containsKey(objectName) : "No parameter container map has been defined for object " + objectName + " of component " + object.getComponent().getName() + "!";
assert parameterContainerMap.get(objectName).containsKey(p.getName()) : "The data container for parameter " + p.getName() + " of " + object.getComponent().getName() + " is not defined!";
String assignedValue = parameterValues.get(parameterContainerMap.get(objectName).get(p.getName()));
String interpretedValue = "";
if (assignedValue != null) {
if (p.getDefaultDomain() instanceof NumericParameterDomain) {
if (resolveIntervals) {
NumericParameterDomain np = (NumericParameterDomain) p.getDefaultDomain();
List<String> vals = SetUtil.unserializeList(assignedValue);
Interval interval = new Interval(Double.valueOf(vals.get(0)), Double.valueOf(vals.get(1)));
if (np.isInteger()) {
interpretedValue = String.valueOf((int) Math.round(interval.getBarycenter()));
} else {
interpretedValue = String.valueOf(interval.getBarycenter());
}
} else {
interpretedValue = assignedValue;
}
} else if (p.getDefaultDomain() instanceof CategoricalParameterDomain) {
interpretedValue = assignedValue;
} else {
throw new UnsupportedOperationException("No support for parameters of type " + p.getClass().getName());
}
paramValuesForThisComponent.put(p, interpretedValue);
}
}
}
return parameterValuesPerComponentInstance;
}
public static Collection<String> getOverwrittenDatacontainersInState(final Monom state) {
return state.stream().filter(l -> l.getPropertyName().equals("overwritten")).map(l -> l.getParameters().get(0).getName()).collect(Collectors.toSet());
}
public static Collection<String> getClosedDatacontainersInState(final Monom state) {
return state.stream().filter(l -> l.getPropertyName().equals("closed")).map(l -> l.getParameters().get(0).getName()).collect(Collectors.toSet());
}
public static Map<String, ComponentInstance> getGroundComponentsFromState(final Monom state, final Collection<? extends IComponent> components, final boolean resolveIntervals) {
Map<String, ComponentInstance> objectMap = new HashMap<>();
Map<String, Map<String, String>> parameterContainerMap = new HashMap<>(); // stores for each object the name of the container of each parameter
Map<String, String> parameterValues = new HashMap<>();
Map<String, String> interfaceMembershipMap = new HashMap<>();
Map<String, Integer> interfaceIndexMap = new HashMap<>();
Map<String, String> interfaceGroupComponentMap = new HashMap<>(); // stores for each interface group to which component instance it belongs
Map<String, String> interfaceGroupRoleMap = new HashMap<>(); // stores for each interface group the role it has for the component
Collection<String> overwrittenDatacontainers = getOverwrittenDatacontainersInState(state);
Map<String, Map<String, Map<Integer, ComponentInstance>>> orderedRequiredInterfaceMap = new HashMap<>(); // stores, for each component instance reference and each of its required interface ids, the map that assigns the concrete component instance to each slot index
/* create (empty) component instances, detect containers for parameter values, and register the values of the data containers */
for (Literal l : state) {
String[] params = l.getParameters().stream().map(LiteralParam::getName).collect(Collectors.toList()).toArray(new String[] {});
switch (l.getPropertyName()) {
case LITERAL_RESOLVES: // field 0 and 1 (parent object name and interface name) are ignored here
String componentName = params[2];
String objectName = params[3];
Optional<? extends IComponent> component = components.stream().filter(c -> c.getName().equals(componentName)).findAny();
if (!component.isPresent()) {
throw new IllegalStateException("Error when treating literal " + l + ". The provided list of components has no component with name \"" + componentName + "\". List of known components: "
+ components.stream().map(c -> "\n\t" + c.getName()).collect(Collectors.joining()));
}
ComponentInstance object = new ComponentInstance(component.get(), new HashMap<>(), new HashMap<>());
objectMap.put(objectName, object);
break;
case LITERAL_PARAMCONTAINER:
if (!parameterContainerMap.containsKey(params[2])) {
parameterContainerMap.put(params[2], new HashMap<>());
}
parameterContainerMap.get(params[2]).put(params[1], params[3]);
break;
case LITERAL_VAL:
parameterValues.put(params[0], params[1]);
break;
case LITERAL_INTERFACEGROUPDEFINITION:
interfaceGroupComponentMap.put(params[3], params[2]);
interfaceGroupRoleMap.put(params[3], params[1]);
break;
case LITERAL_INTERFACEMEMBERSHIP: // first argument is the interface, second is the interface group it belongs to, third argument is the index of the realization
interfaceMembershipMap.put(params[0], params[1]);
interfaceIndexMap.put(params[0], Integer.parseInt(params[2]));
break;
default:
/* simply ignore other cases */
break;
}
}
/* now establish the binding of the required interfaces of the component instances */
state.stream().filter(l -> l.getPropertyName().equals(LITERAL_RESOLVES)).forEach(l -> {
String[] params = l.getParameters().stream().map(LiteralParam::getName).collect(Collectors.toList()).toArray(new String[] {});
String handleOfRequiredInterface = params[0];
String handleOfComponentInstanceThatProvidesTheInterface = params[3];
if (!handleOfRequiredInterface.equals("request")) {
if (!interfaceMembershipMap.containsKey(handleOfRequiredInterface)) {
throw new IllegalArgumentException("The state contains a literal " + l + ", which suggests that " + handleOfRequiredInterface
+ " is the handle of a requried interface. But this handle is not registered in the map. State is: " + state.stream().sorted((s1, s2) -> s1.compareTo(s2)).map(lit -> "\n\t" + lit).collect(Collectors.joining()));
}
String iFaceGroupHandle = interfaceMembershipMap.get(handleOfRequiredInterface);
String handleOfComponentInstanceThatRequiresTheInterface = interfaceGroupComponentMap.get(iFaceGroupHandle);
ComponentInstance providingCI = objectMap.get(handleOfComponentInstanceThatProvidesTheInterface);
int indexOfRealization = interfaceIndexMap.get(handleOfRequiredInterface);
orderedRequiredInterfaceMap.computeIfAbsent(handleOfComponentInstanceThatRequiresTheInterface, rci -> new HashMap<>()).computeIfAbsent(interfaceGroupRoleMap.get(iFaceGroupHandle), gri -> new HashMap<>()).put(indexOfRealization, providingCI);
}
});
for (Entry<String, ComponentInstance> entry : objectMap.entrySet()) {
ComponentInstance requiringCI = entry.getValue();
String requiringCIReference = entry.getKey();
for (IRequiredInterfaceDefinition ri : requiringCI.getComponent().getRequiredInterfaces()) {
String reqInterfaceId = ri.getId();
List<IComponentInstance> realizations = new ArrayList<>();
if (orderedRequiredInterfaceMap.containsKey(requiringCIReference) && orderedRequiredInterfaceMap.get(requiringCIReference).containsKey(reqInterfaceId)) {
Map<Integer, ComponentInstance> provisionsOfThisInterface = orderedRequiredInterfaceMap.get(requiringCIReference).get(reqInterfaceId);
int n = provisionsOfThisInterface.size();
for (int i = 1; i <= n; i++) { // indices here start at 1
if (!provisionsOfThisInterface.containsKey(i)) {
throw new IllegalArgumentException(
"The realizations of the required interface " + requiringCI + " of component " + requiringCI.getComponent().getName() + " is not consecutive! Here is the map: \n" + provisionsOfThisInterface);
}
realizations.add(provisionsOfThisInterface.get(i));
}
}
requiringCI.getSatisfactionOfRequiredInterfaces().put(reqInterfaceId, realizations);
}
}
/* set the explicitly defined parameters (e.g. overwritten containers) in the component instances */
for (Entry<String, ComponentInstance> entry : objectMap.entrySet()) {
String objectName = entry.getKey();
ComponentInstance object = entry.getValue();
for (IParameter p : object.getComponent().getParameters()) {
if (!parameterContainerMap.containsKey(objectName)) {
throw new IllegalStateException("No parameter container map has been defined for object " + objectName + " of component " + object.getComponent().getName() + "!");
}
if (!parameterContainerMap.get(objectName).containsKey(p.getName())) {
throw new IllegalStateException(
"The data container for parameter " + p.getName() + " of " + object.getComponent().getName() + " is not defined! State: " + state.stream().sorted().map(l -> "\n\t" + l).collect(Collectors.joining()));
}
String paramContainerName = parameterContainerMap.get(objectName).get(p.getName());
if (overwrittenDatacontainers.contains(paramContainerName)) {
String assignedValue = parameterValues.get(paramContainerName);
assert assignedValue != null : "parameter containers must always have a value!";
object.getParameterValues().put(p.getName(), getParamValue(p, assignedValue, resolveIntervals));
}
}
}
return objectMap;
}
public static <N, A, V extends Comparable<V>> ComponentInstance getSolutionCompositionForNode(final IHASCOPlanningReduction<N, A> planningGraphDeriver, final Collection<? extends IComponent> components, final Monom initState,
final BackPointerPath<N, A, ?> path, final boolean resolveIntervals) {
return getSolutionCompositionForPlan(components, initState, planningGraphDeriver.decodeSolution(new SearchGraphPath<>(path)), resolveIntervals);
}
public static <N, A, V extends Comparable<V>> ComponentInstance getComponentInstanceForNode(final IHASCOPlanningReduction<N, A> planningGraphDeriver, final Collection<? extends IComponent> components, final Monom initState,
final BackPointerPath<N, A, ?> path, final String name, final boolean resolveIntervals) {
return getComponentInstanceForPlan(components, initState, planningGraphDeriver.decodeSolution(new SearchGraphPath<>(path)), name, resolveIntervals);
}
public static Monom getFinalStateOfPlan(final Monom initState, final IPlan plan) {
Monom state = new Monom(initState);
for (Action a : plan.getActions()) {
StripsUtil.updateState(state, a);
}
return state;
}
public static ComponentInstance getSolutionCompositionForPlan(final Collection<? extends IComponent> components, final Monom initState, final IPlan plan, final boolean resolveIntervals) {
return getSolutionCompositionFromState(components, getFinalStateOfPlan(initState, plan), resolveIntervals);
}
public static ComponentInstance getComponentInstanceForPlan(final Collection<? extends IComponent> components, final Monom initState, final IPlan plan, final String name, final boolean resolveIntervals) {
return getComponentInstanceFromState(components, getFinalStateOfPlan(initState, plan), name, resolveIntervals);
}
public static ComponentInstance getSolutionCompositionFromState(final Collection<? extends IComponent> components, final Monom state, final boolean resolveIntervals) {
return getComponentInstanceFromState(components, state, "solution", resolveIntervals);
}
public static ComponentInstance getComponentInstanceFromState(final Collection<? extends IComponent> components, final Monom state, final String name, final boolean resolveIntervals) {
return HASCOUtil.getGroundComponentsFromState(state, components, resolveIntervals).get(name);
}
public static Map<IParameter, IParameterDomain> getUpdatedDomainsOfComponentParameters(final Monom state, final IComponent component, final String objectIdentifierInState) {
Map<String, String> parameterContainerMap = new HashMap<>();
Map<String, String> parameterContainerMapInv = new HashMap<>();
Map<String, String> parameterValues = new HashMap<>();
/* detect containers for parameter values, and register the values of the data containers */
for (Literal l : state) {
String[] params = l.getParameters().stream().map(LiteralParam::getName).collect(Collectors.toList()).toArray(new String[] {});
switch (l.getPropertyName()) {
case LITERAL_PARAMCONTAINER:
if (!params[2].equals(objectIdentifierInState)) {
continue;
}
parameterContainerMap.put(params[1], params[3]);
parameterContainerMapInv.put(params[3], params[1]);
break;
case LITERAL_VAL:
parameterValues.put(params[0], params[1]);
break;
default: // ignore other literals
break;
}
}
/* determine the current values of the parameters of this component instance; the map is built only to validate that every parameter has a plausible assigned value */
Map<IParameter, String> paramValuesForThisComponentInstance = new HashMap<>();
for (IParameter p : component.getParameters()) {
if (!parameterContainerMap.containsKey(p.getName())) {
throw new IllegalStateException("The data container for parameter " + p.getName() + " of " + objectIdentifierInState + " is not defined!");
}
String assignedValue = parameterValues.get(parameterContainerMap.get(p.getName()));
if (assignedValue == null) {
throw new IllegalStateException("No value has been assigned to parameter " + p.getName() + " stored in container " + parameterContainerMap.get(p.getName()) + " in state " + state);
}
String value = getParamValue(p, assignedValue, false);
assert value != null : "Determined value NULL for parameter " + p.getName() + ", which is not plausible.";
paramValuesForThisComponentInstance.put(p, value);
}
/* extract instance */
ComponentInstance instance = getComponentInstanceFromState(Arrays.asList(component), state, objectIdentifierInState, false);
/* now compute the new domains based on the current values */
return getUpdatedDomainsOfComponentParameters(instance);
}
private static String getParamValue(final IParameter p, final String assignedValue, final boolean resolveIntervals) {
if (assignedValue == null) {
throw new IllegalArgumentException("Cannot determine true value for assigned param value " + assignedValue + " for parameter " + p.getName());
}
String interpretedValue = "";
if (p.isNumeric()) {
if (resolveIntervals) {
NumericParameterDomain np = (NumericParameterDomain) p.getDefaultDomain();
List<String> vals = SetUtil.unserializeList(assignedValue);
Interval interval = new Interval(Double.valueOf(vals.get(0)), Double.valueOf(vals.get(1)));
interpretedValue = String.valueOf(interval.checkPoint((double) p.getDefaultValue(), 0.001) == Location.OUTSIDE ? interval.getBarycenter() : (double) p.getDefaultValue());
if (np.isInteger()) {
interpretedValue = String.valueOf((int) Math.round(Double.parseDouble(interpretedValue)));
}
} else {
interpretedValue = assignedValue;
}
} else if (p.getDefaultDomain() instanceof CategoricalParameterDomain) {
interpretedValue = assignedValue;
} else {
throw new UnsupportedOperationException("No support for parameters of type " + p.getClass().getName());
}
return interpretedValue;
}
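/*
 * Illustrative example for the interval resolution above (the concrete values are made up): for a numeric parameter with default
 * value 5 and assigned interval "[0, 2]", the default lies outside the interval, so the barycenter 1.0 is returned; for the
 * assigned interval "[0, 10]", the default is inside and 5.0 is kept. For integer parameters, the result is additionally rounded
 * to the nearest integer. With resolveIntervals set to false, the serialized interval string is returned unchanged.
 */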
public static Map<IParameter, IParameterDomain> getUpdatedDomainsOfComponentParameters(final ComponentInstance componentInstance) {
IComponent component = componentInstance.getComponent();
/* initialize all params for which a decision has been made already with their respective value */
Map<IParameter, IParameterDomain> domains = new HashMap<>();
for (IParameter p : componentInstance.getParametersThatHaveBeenSetExplicitly()) {
if (p.isNumeric()) {
NumericParameterDomain defaultDomain = (NumericParameterDomain) p.getDefaultDomain();
Interval interval = SetUtil.unserializeInterval(componentInstance.getParameterValue(p));
domains.put(p, new NumericParameterDomain(defaultDomain.isInteger(), interval.getInf(), interval.getSup()));
} else if (p.isCategorical()) {
domains.put(p, new CategoricalParameterDomain(new String[] { componentInstance.getParameterValue(p) }));
}
}
/* initialize all others with the default domain */
for (IParameter p : componentInstance.getParametersThatHaveNotBeenSetExplicitly()) {
domains.put(p, p.getDefaultDomain());
}
assert (domains.keySet().equals(component.getParameters())) : "There are parameters for which no current domain was derived.";
/* update domains based on the dependencies defined for this component */
for (IParameterDependency dependency : component.getParameterDependencies()) {
if (ai.libs.jaicore.components.model.CompositionProblemUtil.isDependencyPremiseSatisfied(dependency, domains)) {
logger.info("Premise of dependency {} is satisfied, applying its conclusions ...", dependency);
for (Pair<IParameter, IParameterDomain> newDomain : dependency.getConclusion()) {
/*
 * apply the concluded domain only if the parameter has not been set explicitly yet. Otherwise, just
 * stick to the current domain
 */
IParameter param = newDomain.getX();
IParameterDomain concludedDomain = newDomain.getY();
if (!componentInstance.getParametersThatHaveBeenSetExplicitly().contains(param)) {
logger.debug("Changing domain of {} from {} to {}", param, domains.get(param), concludedDomain);
domains.put(param, concludedDomain);
} else {
logger.debug("Not changing domain of {} since it has already been set explicitly in the past.", param);
}
}
} else {
logger.debug("Ignoring unsatisfied dependency {}.", dependency);
}
}
return domains;
}
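/*
 * A small illustrative example (component and parameter names are made up): consider a component with a numeric parameter "a"
 * whose default domain is [0, 100] and a categorical parameter "b" with values {x, y}. If "a" has been set explicitly to
 * "[10, 20]" and "b" has not been touched, the returned map contains a -> [10, 20] and b -> {x, y}. Dependencies whose premise
 * is satisfied by these domains may then further replace the domains of parameters that have not been set explicitly, as
 * implemented in the loop above.
 */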
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core/IHascoAware.java
|
package ai.libs.hasco.core;
/**
* Classes can implement this interface if they want to be informed about the HASCO instance in which they are used.
*
* @author fmohr
*
*/
public interface IHascoAware {
public void setHascoReference(HASCO<?, ?, ?> hasco);
public HASCO<?, ?, ?> getHASCOReference();
}
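/*
 * A minimal implementation sketch (the class name is hypothetical, not part of this library): a component that wants to inspect
 * the HASCO instance it is used in simply stores the reference passed to it.
 *
 *   public class MyHascoAwareEvaluator implements IHascoAware {
 *     private HASCO<?, ?, ?> hasco;
 *     @Override
 *     public void setHascoReference(final HASCO<?, ?, ?> hasco) { this.hasco = hasco; }
 *     @Override
 *     public HASCO<?, ?, ?> getHASCOReference() { return this.hasco; }
 *   }
 */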
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core/events/HASCORunStartedEvent.java
|
package ai.libs.hasco.core.events;
import org.api4.java.algorithm.IAlgorithm;
import org.api4.java.common.attributedobjects.IObjectEvaluator;
import ai.libs.jaicore.basic.algorithm.AlgorithmInitializedEvent;
public class HASCORunStartedEvent<T, V extends Comparable<V>> extends AlgorithmInitializedEvent {
private final int seed;
private final int timeout;
private final int numberOfCPUS;
private IObjectEvaluator<T, V> benchmark;
public HASCORunStartedEvent(final IAlgorithm<?, ?> algorithm, final int seed, final int timeout, final int numberOfCPUS, final IObjectEvaluator<T, V> benchmark) {
super(algorithm);
this.seed = seed;
this.timeout = timeout;
this.numberOfCPUS = numberOfCPUS;
this.benchmark = benchmark;
}
public IObjectEvaluator<T, V> getBenchmark() {
return this.benchmark;
}
public void setBenchmark(final IObjectEvaluator<T, V> benchmark) {
this.benchmark = benchmark;
}
public int getSeed() {
return this.seed;
}
public int getTimeout() {
return this.timeout;
}
public int getNumberOfCPUS() {
return this.numberOfCPUS;
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core/events/HASCORunTerminatedEvent.java
|
package ai.libs.hasco.core.events;
import ai.libs.jaicore.components.model.ComponentInstance;
public class HASCORunTerminatedEvent<T, V extends Comparable<V>> {
private final T returnedSolution;
private final ComponentInstance compositionOfSolution;
private final V score;
public HASCORunTerminatedEvent(final ComponentInstance composition, final T returnedSolution, final V score) {
super();
this.compositionOfSolution = composition;
this.returnedSolution = returnedSolution;
this.score = score;
}
public ComponentInstance getCompositionOfSolution() {
return this.compositionOfSolution;
}
public T getReturnedSolution() {
return this.returnedSolution;
}
public V getScore() {
return this.score;
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core/events/HASCOSolutionEvent.java
|
package ai.libs.hasco.core.events;
import org.api4.java.algorithm.IAlgorithm;
import org.api4.java.algorithm.events.result.IScoredSolutionCandidateFoundEvent;
import ai.libs.hasco.core.HASCOSolutionCandidate;
import ai.libs.jaicore.basic.algorithm.ASolutionCandidateFoundEvent;
public class HASCOSolutionEvent<V extends Comparable<V>> extends ASolutionCandidateFoundEvent<HASCOSolutionCandidate<V>> implements IScoredSolutionCandidateFoundEvent<HASCOSolutionCandidate<V>, V> {
public HASCOSolutionEvent(final IAlgorithm<?, ?> algorithm, final HASCOSolutionCandidate<V> solutionCandidate) {
super(algorithm, solutionCandidate);
}
@Override
public V getScore() {
return this.getSolutionCandidate().getScore();
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core/events/TwoPhaseHASCOPhaseSwitchEvent.java
|
package ai.libs.hasco.core.events;
import org.api4.java.algorithm.IAlgorithm;
import ai.libs.jaicore.basic.algorithm.AAlgorithmEvent;
public class TwoPhaseHASCOPhaseSwitchEvent extends AAlgorithmEvent {
public TwoPhaseHASCOPhaseSwitchEvent(final IAlgorithm<?, ?> algorithm) {
super(algorithm);
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core/events/TwoPhaseHASCOSelectionPhaseSkippedEvent.java
|
package ai.libs.hasco.core.events;
import org.api4.java.algorithm.IAlgorithm;
import ai.libs.jaicore.basic.algorithm.AAlgorithmEvent;
public class TwoPhaseHASCOSelectionPhaseSkippedEvent extends AAlgorithmEvent {
public TwoPhaseHASCOSelectionPhaseSkippedEvent(final IAlgorithm<?, ?> algorithm) {
super(algorithm);
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core/events/TwoPhaseHASCOSolutionEvaluationEvent.java
|
package ai.libs.hasco.core.events;
import ai.libs.hasco.twophase.TwoPhaseHASCO;
import ai.libs.jaicore.basic.algorithm.AAlgorithmEvent;
import ai.libs.jaicore.components.api.IComponentInstance;
public class TwoPhaseHASCOSolutionEvaluationEvent extends AAlgorithmEvent {
private final IComponentInstance componentInstance;
private final double score;
public TwoPhaseHASCOSolutionEvaluationEvent(final TwoPhaseHASCO<?, ?> algorithm, final IComponentInstance componentInstance, final double score) {
super(algorithm);
this.componentInstance = componentInstance;
this.score = score;
}
public IComponentInstance getComponentInstance() {
return this.componentInstance;
}
public double getScore() {
return this.score;
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core/predicate/IsNotRefinablePredicate.java
|
package ai.libs.hasco.core.predicate;
import java.util.Collection;
import java.util.List;
import ai.libs.jaicore.components.api.IComponent;
import ai.libs.jaicore.components.api.INumericParameterRefinementConfigurationMap;
import ai.libs.jaicore.logic.fol.structure.ConstantParam;
import ai.libs.jaicore.logic.fol.structure.Monom;
import ai.libs.jaicore.logic.fol.theories.EvaluablePredicate;
public class IsNotRefinablePredicate implements EvaluablePredicate {
private final IsValidParameterRangeRefinementPredicate p;
public IsNotRefinablePredicate(final Collection<? extends IComponent> components, final INumericParameterRefinementConfigurationMap refinementConfiguration) {
super();
this.p = new IsValidParameterRangeRefinementPredicate(components, refinementConfiguration);
}
@Override
public Collection<List<ConstantParam>> getParamsForPositiveEvaluation(final Monom state, final ConstantParam... partialGrounding) {
throw new UnsupportedOperationException();
}
@Override
public boolean isOracable() {
return false;
}
@Override
public Collection<List<ConstantParam>> getParamsForNegativeEvaluation(final Monom state, final ConstantParam... partialGrounding) {
throw new UnsupportedOperationException();
}
@Override
public boolean test(final Monom state, final ConstantParam... params) {
return this.p.getParamsForPositiveEvaluation(state, params[0], params[1], params[2], params[3], params[4], null).isEmpty();
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core/predicate/IsRefinementCompletedPredicate.java
|
package ai.libs.hasco.core.predicate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.NotImplementedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ai.libs.hasco.core.HASCOUtil;
import ai.libs.jaicore.basic.sets.SetUtil;
import ai.libs.jaicore.components.api.IComponent;
import ai.libs.jaicore.components.api.INumericParameterRefinementConfigurationMap;
import ai.libs.jaicore.components.api.INumericParameterRefinementConfiguration;
import ai.libs.jaicore.components.api.IParameter;
import ai.libs.jaicore.components.model.CategoricalParameterDomain;
import ai.libs.jaicore.components.model.ComponentInstance;
import ai.libs.jaicore.components.model.NumericParameterDomain;
import ai.libs.jaicore.logic.fol.structure.ConstantParam;
import ai.libs.jaicore.logic.fol.structure.Literal;
import ai.libs.jaicore.logic.fol.structure.Monom;
import ai.libs.jaicore.logic.fol.theories.EvaluablePredicate;
public class IsRefinementCompletedPredicate implements EvaluablePredicate {
private final Logger logger = LoggerFactory.getLogger(IsRefinementCompletedPredicate.class);
private final Collection<IComponent> components;
private final INumericParameterRefinementConfigurationMap refinementConfiguration;
public IsRefinementCompletedPredicate(final Collection<? extends IComponent> components, final INumericParameterRefinementConfigurationMap refinementConfiguration) {
super();
this.components = new ArrayList<>(components);
this.refinementConfiguration = refinementConfiguration;
}
@Override
public Collection<List<ConstantParam>> getParamsForPositiveEvaluation(final Monom state, final ConstantParam... partialGrounding) {
throw new NotImplementedException("This is not an oracable predicate!");
}
@Override
public boolean isOracable() {
return false;
}
@Override
public Collection<List<ConstantParam>> getParamsForNegativeEvaluation(final Monom state, final ConstantParam... partialGrounding) {
throw new UnsupportedOperationException();
}
@Override
public boolean test(final Monom state, final ConstantParam... params) {
/* initialize routine */
if (params.length != 2) {
throw new IllegalArgumentException("There should be exactly two parameters additional to the state but " + params.length + " were provided: " + Arrays.toString(params)
+ ". This parameters refer to the component name that is being configured and the object itself.");
}
if (params[0] == null) {
throw new IllegalArgumentException("The component name must not be null.");
}
if (params[1] == null) {
throw new IllegalArgumentException("The component instance reference must not be null.");
}
final String objectContainer = params[1].getName();
/* determine current values for the params */
ComponentInstance groundComponent = HASCOUtil.getGroundComponentsFromState(state, this.components, false).get(objectContainer);
IComponent component = groundComponent.getComponent();
Map<String, String> componentParamContainers = HASCOUtil.getParameterContainerMap(state, objectContainer);
for (IParameter param : component.getParameters()) {
String containerOfParam = componentParamContainers.get(param.getName());
String currentValueOfParam = groundComponent.getParameterValue(param);
boolean variableHasBeenSet = state.contains(new Literal("overwritten('" + containerOfParam + "')"));
boolean variableHasBeenClosed = state.contains(new Literal("closed('" + containerOfParam + "')"));
assert variableHasBeenSet == groundComponent.getParametersThatHaveBeenSetExplicitly().contains(param);
assert !variableHasBeenClosed || variableHasBeenSet : "Parameter " + param.getName() + " of component " + component.getName() + " with default domain " + param.getDefaultDomain() + " has been closed but no value has been set.";
INumericParameterRefinementConfiguration refinementConfig = this.refinementConfiguration.getRefinement(component, param);
if (param.isNumeric()) {
double min = 0;
double max = 0;
if (currentValueOfParam != null) {
List<String> interval = SetUtil.unserializeList(currentValueOfParam);
min = Double.parseDouble(interval.get(0));
max = Double.parseDouble(interval.get(1));
} else {
min = ((NumericParameterDomain) param.getDefaultDomain()).getMin();
max = ((NumericParameterDomain) param.getDefaultDomain()).getMax();
}
double lengthStopCriterion = refinementConfig.getIntervalLength();
double length = max - min;
if ((refinementConfig.isInitRefinementOnLogScale() && (max / min - 1) > lengthStopCriterion) || (!refinementConfig.isInitRefinementOnLogScale() && length > lengthStopCriterion)) {
this.logger.info("Test for isRefinementCompletedPredicate({},{}) is negative. Interval length of [{},{}] is {}. Required length to consider an interval atomic is {}", params[0].getName(), objectContainer, min, max,
length, refinementConfig.getIntervalLength());
return false;
}
} else if (param.getDefaultDomain() instanceof CategoricalParameterDomain) { // categorical params can be refined iff they have been neither set nor closed before
assert param.getDefaultValue() != null : "Param " + param.getName() + " has no default value!";
if (!variableHasBeenSet && !variableHasBeenClosed) {
this.logger.info("Test for isRefinementCompletedPredicate({},{}) is negative", params[0].getName(), objectContainer);
return false;
}
} else {
throw new UnsupportedOperationException("Currently no support for testing parameters of type " + param.getClass().getName());
}
}
this.logger.info("Test for isRefinementCompletedPredicate({},{}) is positive", params[0].getName(), objectContainer);
return true;
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core/predicate/IsValidParameterRangeRefinementPredicate.java
|
package ai.libs.hasco.core.predicate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import org.apache.commons.lang3.NotImplementedException;
import org.apache.commons.math3.geometry.euclidean.oned.Interval;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ai.libs.hasco.core.HASCOUtil;
import ai.libs.jaicore.basic.sets.SetUtil;
import ai.libs.jaicore.components.api.IComponent;
import ai.libs.jaicore.components.api.INumericParameterRefinementConfiguration;
import ai.libs.jaicore.components.api.INumericParameterRefinementConfigurationMap;
import ai.libs.jaicore.components.api.IParameter;
import ai.libs.jaicore.components.api.IParameterDomain;
import ai.libs.jaicore.components.model.CategoricalParameterDomain;
import ai.libs.jaicore.components.model.ComponentInstance;
import ai.libs.jaicore.components.model.NumericParameterDomain;
import ai.libs.jaicore.logic.fol.structure.ConstantParam;
import ai.libs.jaicore.logic.fol.structure.Literal;
import ai.libs.jaicore.logic.fol.structure.Monom;
import ai.libs.jaicore.logic.fol.theories.EvaluablePredicate;
import ai.libs.jaicore.logic.fol.util.LogicUtil;
public class IsValidParameterRangeRefinementPredicate implements EvaluablePredicate {
private final Logger logger = LoggerFactory.getLogger(IsValidParameterRangeRefinementPredicate.class);
private final Collection<IComponent> components;
private final INumericParameterRefinementConfigurationMap refinementConfiguration;
private final Map<ComponentInstance, Double> knownCompositionsAndTheirScore = new HashMap<>();
public IsValidParameterRangeRefinementPredicate(final Collection<? extends IComponent> components, final INumericParameterRefinementConfigurationMap refinementConfiguration) {
super();
this.components = new ArrayList<>(components);
this.refinementConfiguration = refinementConfiguration;
}
@Override
public Collection<List<ConstantParam>> getParamsForPositiveEvaluation(final Monom state, final ConstantParam... partialGrounding) {
this.logger.info("Computing params that evaluate isValidParameterRangeRefinement positively in state with hash code {}.", state.hashCode());
/* determine the context for which the interval refinement should be oracled */
if (partialGrounding.length != 6) {
throw new IllegalArgumentException("The interpreted predicate " + this.getClass().getName() + " requires 6 arguments when oracled but " + partialGrounding.length + " have been provided!");
}
String componentName = partialGrounding[0].getName();
String componentIdentifier = partialGrounding[1].getName();
String parameterName = partialGrounding[2].getName();
IComponent component;
Optional<IComponent> searchedComponent = this.components.stream().filter(c -> c.getName().equals(componentName)).findAny();
if (searchedComponent.isPresent()) {
component = searchedComponent.get();
} else {
throw new IllegalArgumentException("Could not find matching component.");
}
Optional<IParameter> optParam = component.getParameters().stream().filter(p -> p.getName().equals(parameterName)).findAny();
IParameter param;
if (optParam.isPresent()) {
param = optParam.get();
} else {
throw new IllegalArgumentException("Could not find required parameter");
}
List<ConstantParam> partialGroundingAsList = Arrays.asList(partialGrounding);
String containerName = partialGrounding[3].getName();
String currentParamValue = partialGrounding[4].getName(); // this is not really used, because the current value is again read from the state
this.logger.info("Determining positive evaluations for isValidParameterRangeRefinementPredicate({},{},{},{},{},{})", componentName, componentIdentifier, parameterName, containerName, currentParamValue, partialGrounding[5]);
boolean hasBeenSetBefore = state.contains(new Literal("overwritten('" + containerName + "')"));
/* determine component instance and the true domain of parameter */
ComponentInstance instance = HASCOUtil.getComponentInstanceFromState(this.components, state, componentIdentifier, false);
this.logger.debug("Derived component instance to be refined: {}. Parameter to refine: {}. Current value of parameter: {}", instance, param, currentParamValue);
try {
Map<IParameter, IParameterDomain> paramDomains = HASCOUtil.getUpdatedDomainsOfComponentParameters(instance);
if (this.logger.isDebugEnabled()) {
this.logger.debug("Parameter domains are: {}", paramDomains.keySet().stream().map(k -> "\n\t" + k + ": " + paramDomains.get(k)).collect(Collectors.joining()));
}
/* determine refinements for numeric parameters */
if (param.isNumeric()) {
NumericParameterDomain currentlyActiveDomain = (NumericParameterDomain) paramDomains.get(param);
Interval currentInterval = new Interval(currentlyActiveDomain.getMin(), currentlyActiveDomain.getMax());
assert (!hasBeenSetBefore || (currentInterval.getInf() == Double.valueOf(SetUtil.unserializeList(currentParamValue).get(0)) && currentInterval.getSup() == Double
.valueOf(SetUtil.unserializeList(currentParamValue).get(1)))) : "The derived currently active domain of an explicitly set parameter deviates from the domain specified in the state!";
INumericParameterRefinementConfiguration refinementConfig = this.refinementConfiguration.getRefinement(component, param);
if (refinementConfig == null) {
throw new IllegalArgumentException("No refinement configuration for parameter \"" + parameterName + "\" of component \"" + componentName + "\" has been supplied!");
}
if (refinementConfig.getIntervalLength() <= 0) {
throw new IllegalArgumentException("The interval length configured for param " + parameterName + " of component " + componentName + " must be strictly positive but is " + refinementConfig.getIntervalLength());
}
/* if the interval is under the distinction threshold, return an empty list of possible refinements (predicate will always be false here) */
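/* Example (illustrative numbers): for the interval [10, 20], the relative (log-scale) length computed below is 20/10 - 1 = 1.0 and the absolute length is 10; depending on whether the parameter is refined on a log scale, one of the two is compared against the configured minimum interval length. */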
double relativeLength = (currentInterval.getSup() / currentInterval.getInf() - 1);
double absoluteLength = currentInterval.getSup() - currentInterval.getInf();
boolean isAtomicInterval = refinementConfig.isInitRefinementOnLogScale() && relativeLength <= refinementConfig.getIntervalLength()
|| !refinementConfig.isInitRefinementOnLogScale() && absoluteLength <= refinementConfig.getIntervalLength();
if (isAtomicInterval) {
this.logger.info("Returning an empty list as this is a numeric parameter that has been narrowed sufficiently. Required interval length is {}, and actual interval length is {}", refinementConfig.getIntervalLength(),
currentInterval.getSup() - currentInterval.getInf());
if (!hasBeenSetBefore) {
List<Interval> unmodifiedRefinement = new ArrayList<>();
unmodifiedRefinement.add(currentInterval);
return this.getGroundingsForIntervals(unmodifiedRefinement, partialGroundingAsList);
}
return new ArrayList<>();
} else {
this.logger.debug("Current interval [{},{}] is not considered atomic. Relative length is {}, and absolute length is {}", currentInterval.getInf(), currentInterval.getSup(), relativeLength, absoluteLength);
}
/* if this is an integer and the number of comprised integers are at most as many as the branching factor, enumerate them */
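/* Example (illustrative numbers): for an integer parameter with current interval [2.3, 5.7] and at least 3 refinements per step, the loop below enumerates the point intervals [3, 3], [4, 4], and [5, 5]. */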
if (currentlyActiveDomain.isInteger() && (Math.floor(currentInterval.getSup()) - Math.ceil(currentInterval.getInf()) + 1 <= refinementConfig.getRefinementsPerStep())) {
List<Interval> proposedRefinements = new ArrayList<>();
for (int i = (int) Math.ceil(currentInterval.getInf()); i <= (int) Math.floor(currentInterval.getSup()); i++) {
proposedRefinements.add(new Interval(i, i));
}
this.logger.info("Ultimate level of integer refinement reached. Returning refinements: {}.", proposedRefinements.stream().map(i -> "[" + i.getInf() + ", " + i.getSup() + "]").collect(Collectors.toList()));
return this.getGroundingsForIntervals(proposedRefinements, partialGroundingAsList);
}
/* if this parameter is to be refined on a linear scale, enter this block */
if (hasBeenSetBefore || !refinementConfig.isInitRefinementOnLogScale()) {
List<Interval> proposedRefinements = this.refineOnLinearScale(currentInterval, refinementConfig.getRefinementsPerStep(), refinementConfig.getIntervalLength(), refinementConfig.isInitRefinementOnLogScale(),
refinementConfig.isInitWithExtremalPoints() && !hasBeenSetBefore);
for (Interval proposedRefinement : proposedRefinements) {
assert proposedRefinement.getInf() >= currentInterval.getInf() && proposedRefinement.getSup() <= currentInterval.getSup() : "The proposed refinement [" + proposedRefinement.getInf() + ", "
+ proposedRefinement.getSup() + "] is not a sub-interval of " + currentParamValue + ".";
assert !hasBeenSetBefore || !proposedRefinement.equals(currentInterval) : "No real refinement of parameter " + parameterName + " of component " + componentName + " with min interval size " + refinementConfig.getIntervalLength() + "! Intervals [" + currentInterval.getInf() + ", " + currentInterval.getSup() + "] and [" + proposedRefinement.getInf() + ", " + proposedRefinement.getSup() + "] are identical. State description: " + LogicUtil.getSortedLiteralSetDescription(state);
}
this.logger.info("Returning linear refinements: {}.", proposedRefinements.stream().map(i -> "[" + i.getInf() + ", " + i.getSup() + "]").collect(Collectors.toList()));
return this.getGroundingsForIntervals(proposedRefinements, partialGroundingAsList);
}
/* if this is a log-scale parameter, compute the focus value and the other intervals */
Optional<Literal> focusPredicate = state.stream()
.filter(l -> l.getPropertyName().equals("parameterFocus") && l.getParameters().get(0).getName().equals(componentIdentifier) && l.getParameters().get(1).getName().equals(parameterName)).findAny();
if (!focusPredicate.isPresent()) {
throw new IllegalArgumentException("The given state does not specify a parameter focus for the log-scale parameter " + parameterName + " on object \"" + componentIdentifier + "\"");
}
double focus = Double.parseDouble(focusPredicate.get().getParameters().get(2).getName());
if (refinementConfig.getLogBasis() <= 1) {
throw new UnsupportedOperationException(
"The basis for log-scaled parameter " + param.getName() + " of component " + instance.getComponent().getName() + " must be strictly greater than 1 (but is " + refinementConfig.getLogBasis() + ").");
}
List<Interval> proposedRefinements = this.refineOnLogScale(currentInterval, refinementConfig.getRefinementsPerStep(), refinementConfig.getLogBasis(), focus, refinementConfig.isInitWithExtremalPoints() && !hasBeenSetBefore);
for (Interval proposedRefinement : proposedRefinements) {
double epsilon = 1E-7;
assert proposedRefinement.getInf() + epsilon >= currentInterval.getInf() && proposedRefinement.getSup() <= currentInterval.getSup() + epsilon : "The proposed refinement [" + proposedRefinement.getInf() + ", "
+ proposedRefinement.getSup() + "] is not a sub-interval of " + currentParamValue + ".";
assert !proposedRefinement.equals(currentInterval) : "No real refinement! Intervals are identical.";
}
this.logger.info("Returning log-scale refinements with focus point {}: {}.", focus, proposedRefinements.stream().map(i -> "[" + i.getInf() + ", " + i.getSup() + "]").collect(Collectors.toList()));
return this.getGroundingsForIntervals(proposedRefinements, partialGroundingAsList);
} else if (param.isCategorical()) {
List<String> possibleValues = new ArrayList<>();
if (hasBeenSetBefore) {
this.logger.info("Returning empty list since param has been set before.");
return new ArrayList<>();
}
for (Object valAsObject : ((CategoricalParameterDomain) paramDomains.get(param)).getValues()) {
possibleValues.add(valAsObject.toString());
}
this.logger.info("Returning possible values {}.", possibleValues);
return this.getGroundingsForOracledValues(possibleValues, partialGroundingAsList);
} else {
throw new UnsupportedOperationException("Currently no support for parameters of class \"" + param.getClass().getName() + "\"");
}
} catch (Exception e) {
this.logger.error("Unexpected exception observed", e);
}
return new ArrayList<>();
}
private Collection<List<ConstantParam>> getGroundingsForIntervals(final List<Interval> refinements, final List<ConstantParam> partialGrounding) {
List<String> paramValues = new ArrayList<>();
for (Interval oracledInterval : refinements) {
paramValues.add("[" + oracledInterval.getInf() + ", " + oracledInterval.getSup() + "]");
}
return this.getGroundingsForOracledValues(paramValues, partialGrounding);
}
private Collection<List<ConstantParam>> getGroundingsForOracledValues(final List<String> refinements, final List<ConstantParam> partialGrounding) {
Collection<List<ConstantParam>> groundings = new ArrayList<>();
for (String oracledValue : refinements) {
List<ConstantParam> grounding = new ArrayList<>(partialGrounding);
grounding.set(5, new ConstantParam(oracledValue));
groundings.add(grounding);
}
return groundings;
}
public void informAboutNewSolution(final ComponentInstance solution, final double score) {
this.knownCompositionsAndTheirScore.put(solution, score);
}
@Override
public boolean isOracable() {
return true;
}
@Override
public Collection<List<ConstantParam>> getParamsForNegativeEvaluation(final Monom state, final ConstantParam... partialGrounding) {
throw new UnsupportedOperationException();
}
@Override
public boolean test(final Monom state, final ConstantParam... params) {
throw new NotImplementedException("Testing the validity-predicate is currently not supported. This is indirectly possible using the oracle.");
}
public List<Interval> refineOnLinearScale(final Interval interval, final int maxNumberOfSubIntervals, final double minimumLengthOfIntervals, final boolean wasInitiallyLogarithmic, final boolean createPointIntervalsForExtremalValues) {
double min = interval.getInf();
double max = interval.getSup();
double length = max - min;
double logLength = max / min - 1;
double relevantLength = wasInitiallyLogarithmic ? logLength : length;
List<Interval> intervals = new ArrayList<>();
this.logger.debug("Refining interval [{}, {}] in a linear fashion. Was initially refined on log-scale: {}", min, max, wasInitiallyLogarithmic);
/* if no refinement is possible, return just the interval itself */
if (relevantLength <= minimumLengthOfIntervals) {
intervals.add(interval);
if (createPointIntervalsForExtremalValues) {
intervals.add(0, new Interval(min, min));
intervals.add(new Interval(max, max));
}
return intervals;
}
/* otherwise compute the sub-intervals */
int numberOfIntervals = Math.min((int) Math.ceil(relevantLength / minimumLengthOfIntervals), maxNumberOfSubIntervals);
if (createPointIntervalsForExtremalValues) {
numberOfIntervals -= 2;
}
numberOfIntervals = Math.max(numberOfIntervals, 1);
this.logger.trace("Splitting interval of length {} and log-length {} into {} sub-intervals.", length, logLength, numberOfIntervals);
double stepSize = length / numberOfIntervals;
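/* Worked example (illustrative numbers): for the interval [0, 10] with a minimum interval length of 2, at most 4 sub-intervals per step, and no extremal point intervals, ceil(10 / 2) = 5 is capped at 4, the step size is 2.5, and the loop below produces [0, 2.5], [2.5, 5], [5, 7.5], [7.5, 10]. */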
for (int i = 0; i < numberOfIntervals; i++) {
intervals.add(new Interval(Math.max(min, min + i * stepSize), Math.min(max, min + ((i + 1) * stepSize))));
}
if (createPointIntervalsForExtremalValues) {
intervals.add(0, new Interval(min, min));
intervals.add(new Interval(max, max));
}
this.logger.trace("Derived sub-intervals {}", intervals.stream().map(i -> "[" + i.getInf() + ", " + i.getSup() + "]").collect(Collectors.toList()));
return intervals;
}
public List<Interval> refineOnLogScale(final Interval interval, final int numSubIntervals, final double basis, final double pointOfConcentration, final boolean createPointIntervalsForExtremalValues) {
List<Interval> list = new ArrayList<>();
double min = interval.getInf();
double max = interval.getSup();
this.logger.debug("Received call to create {} log-scaled sub-intervals for interval [{},{}] to the basis {}.", numSubIntervals, min, max, basis);
double length = max - min;
/* if the point of concentration is exactly on the left or the right of the interval, conduct the standard technique */
if (pointOfConcentration <= min || pointOfConcentration >= max) {
int numOfGeneratedSubIntervals = numSubIntervals;
if (createPointIntervalsForExtremalValues) {
numOfGeneratedSubIntervals -= 2;
}
if (numOfGeneratedSubIntervals <= 0) {
throw new IllegalArgumentException("Number of created sub-intervals must be strictly positive but is " + numOfGeneratedSubIntervals + ".");
}
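/*
 * The sub-interval lengths computed below form a geometric series l, l*b, ..., l*b^(k-1) with b = basis and
 * k = numOfGeneratedSubIntervals. Their sum must equal the total interval length, i.e. l * (b^k - 1) / (b - 1) = length,
 * which yields l = length * (1 - b) / (1 - b^k) for the shortest interval.
 * Example (illustrative numbers): length = 7, basis = 2, k = 3 gives l = 1 and sub-interval lengths 1, 2, 4.
 */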
double lengthOfShortestInterval = length * (1 - basis) / (1 - Math.pow(basis, numOfGeneratedSubIntervals));
while (lengthOfShortestInterval < 1.0E-10) {
this.logger.trace("Initial interval would have size {} for a total number of {} sub-intervals, but length must be at least 10^-10. Reducing the number by 1.", lengthOfShortestInterval, numOfGeneratedSubIntervals);
numOfGeneratedSubIntervals--;
lengthOfShortestInterval = length * (1 - basis) / (1 - Math.pow(basis, numOfGeneratedSubIntervals));
}
this.logger.debug("Generating {} log-scaled sub-intervals for interval [{},{}] to the basis {}. Length of shortest interval is {}", numOfGeneratedSubIntervals, min, max, basis, lengthOfShortestInterval);
if (pointOfConcentration <= min) {
double endOfLast = min;
for (int i = 0; i < numOfGeneratedSubIntervals; i++) {
double start = endOfLast;
assert start >= min;
endOfLast = start + Math.pow(basis, i) * lengthOfShortestInterval;
assert endOfLast <= max : "Sub-interval must not exceed the upper bound of the original interval.";
if (endOfLast <= start) {
throw new IllegalArgumentException("Interval size for [" + start + ", " + (start + Math.pow(basis, i) * lengthOfShortestInterval) + "] is not positive.");
}
list.add(new Interval(start, endOfLast));
this.logger.trace("Added interval [{}, {}]", start, endOfLast);
}
} else {
double endOfLast = max;
for (int i = 0; i < numOfGeneratedSubIntervals; i++) {
double start = endOfLast;
endOfLast = start - Math.pow(basis, i) * lengthOfShortestInterval;
list.add(new Interval(endOfLast, start));
}
Collections.reverse(list);
}
if (createPointIntervalsForExtremalValues) {
list.add(0, new Interval(min, min));
list.add(new Interval(max, max));
}
return list;
}
/* if the point of concentration lies in the interior of the interval, split the interval accordingly and solve the problem recursively */
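/*
 * Example (illustrative numbers): for the interval [0, 100] with focus 25, 8 sub-intervals, and point intervals for extremal
 * values, the left partition receives max(1, floor(8 * 25 / 100)) + 2 = 4 segments and the right partition the remaining 4;
 * the method then recurses on [0, 25] and [25, 100] with the focus on their shared boundary.
 */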
double distanceFromMinToFocus = Math.abs(interval.getInf() - pointOfConcentration);
int segmentsForLeft = (int) Math.max(1, Math.floor(numSubIntervals * distanceFromMinToFocus / length));
if (createPointIntervalsForExtremalValues) {
segmentsForLeft += 2;
}
int segmentsForRight = numSubIntervals - segmentsForLeft;
assert segmentsForRight >= 1;
if (!createPointIntervalsForExtremalValues || segmentsForRight < 3) {
throw new IllegalArgumentException("No refinement possible if interval points are not included or segments for the right are less than 3");
}
this.logger.debug("Focus {} is inside the given interval. Create two partitions, one on the left ({} segments), and one on the right ({} segments).", pointOfConcentration, segmentsForLeft, segmentsForRight);
list.addAll(this.refineOnLogScale(new Interval(min, pointOfConcentration), segmentsForLeft, basis, pointOfConcentration, createPointIntervalsForExtremalValues));
list.addAll(this.refineOnLogScale(new Interval(pointOfConcentration, max), segmentsForRight, basis, pointOfConcentration, createPointIntervalsForExtremalValues));
return list;
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core/reduction
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core/reduction/planning2search/DefaultHASCOPlanningReduction.java
|
package ai.libs.hasco.core.reduction.planning2search;
import java.util.HashMap;
import java.util.Map;
import org.api4.java.datastructure.graph.ILabeledPath;
import ai.libs.jaicore.logging.ToJSONStringUtil;
import ai.libs.jaicore.planning.core.interfaces.IPlan;
import ai.libs.jaicore.planning.hierarchical.problems.ceocipstn.CEOCIPSTNPlanningProblem;
import ai.libs.jaicore.planning.hierarchical.problems.htn.IHierarchicalPlanningToGraphSearchReduction;
import ai.libs.jaicore.search.probleminputs.GraphSearchInput;
/**
* This class only serves to facilitate the usage of HASCO when passing a planning-to-search reduction.
* HASCO requires an IHASCOPlanningReduction, which merely fixes some of the generics of IHierarchicalPlanningToGraphSearchReduction,
* but this implies that you cannot just pass arbitrary IHierarchicalPlanningToGraphSearchReduction objects anymore.
* To circumvent this problem, this class implements IHASCOPlanningReduction and wraps any compatible IHierarchicalPlanningToGraphSearchReduction.
*
* @author fmohr
*
* @param <N>
* @param <A>
*/
public class DefaultHASCOPlanningReduction<N, A> implements IHASCOPlanningReduction<N, A> {
private final IHierarchicalPlanningToGraphSearchReduction<N, A, ? super CEOCIPSTNPlanningProblem, ? extends IPlan, ? extends GraphSearchInput<N,A>, ? super ILabeledPath<N, A>> wrappedDeriver;
public DefaultHASCOPlanningReduction(final IHierarchicalPlanningToGraphSearchReduction<N, A, ? super CEOCIPSTNPlanningProblem, ? extends IPlan, ? extends GraphSearchInput<N,A>, ? super ILabeledPath<N, A>> wrappedDeriver) {
super();
this.wrappedDeriver = wrappedDeriver;
}
@Override
public GraphSearchInput<N, A> encodeProblem(final CEOCIPSTNPlanningProblem problem) {
return this.wrappedDeriver.encodeProblem(problem);
}
@Override
public IPlan decodeSolution(final ILabeledPath<N, A> path) {
return this.wrappedDeriver.decodeSolution(path);
}
@Override
public String toString() {
Map<String, Object> fields = new HashMap<>();
fields.put("wrappedDeriver", this.wrappedDeriver);
return ToJSONStringUtil.toJSONString(this.getClass().getSimpleName(), fields);
}
}
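/*
 * A minimal usage sketch (someReduction, planningProblem, and somePathInTheSearchGraph are hypothetical placeholders, not part
 * of this file): given any compatible IHierarchicalPlanningToGraphSearchReduction, it can be adapted for HASCO as follows.
 *
 *   IHASCOPlanningReduction<N, A> hascoReduction = new DefaultHASCOPlanningReduction<>(someReduction);
 *   GraphSearchInput<N, A> searchProblem = hascoReduction.encodeProblem(planningProblem);
 *   IPlan plan = hascoReduction.decodeSolution(somePathInTheSearchGraph);
 */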
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core/reduction
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core/reduction/planning2search/IHASCOPlanningReduction.java
|
package ai.libs.hasco.core.reduction.planning2search;
import org.api4.java.datastructure.graph.ILabeledPath;
import ai.libs.jaicore.planning.core.interfaces.IPlan;
import ai.libs.jaicore.planning.hierarchical.problems.ceocipstn.CEOCIPSTNPlanningProblem;
import ai.libs.jaicore.planning.hierarchical.problems.htn.IHierarchicalPlanningToGraphSearchReduction;
import ai.libs.jaicore.search.probleminputs.GraphSearchInput;
public interface IHASCOPlanningReduction<N, A> extends IHierarchicalPlanningToGraphSearchReduction<N, A, CEOCIPSTNPlanningProblem, IPlan, GraphSearchInput<N,A>, ILabeledPath<N,A>> {
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core/reduction
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core/reduction/softcomp2planning/HASCOReduction.java
|
package ai.libs.hasco.core.reduction.softcomp2planning;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.api4.java.datastructure.graph.implicit.IGraphGenerator;
import ai.libs.hasco.core.HASCOUtil;
import ai.libs.hasco.core.predicate.IsNotRefinablePredicate;
import ai.libs.hasco.core.predicate.IsRefinementCompletedPredicate;
import ai.libs.hasco.core.predicate.IsValidParameterRangeRefinementPredicate;
import ai.libs.hasco.core.reduction.planning2search.IHASCOPlanningReduction;
import ai.libs.jaicore.basic.algorithm.reduction.AlgorithmicProblemReduction;
import ai.libs.jaicore.components.api.IComponent;
import ai.libs.jaicore.components.api.INumericParameterRefinementConfigurationMap;
import ai.libs.jaicore.components.api.IParameter;
import ai.libs.jaicore.components.api.IRequiredInterfaceDefinition;
import ai.libs.jaicore.components.model.ComponentInstance;
import ai.libs.jaicore.components.model.NumericParameterDomain;
import ai.libs.jaicore.components.model.RefinementConfiguredSoftwareConfigurationProblem;
import ai.libs.jaicore.logic.fol.structure.CNFFormula;
import ai.libs.jaicore.logic.fol.structure.ConstantParam;
import ai.libs.jaicore.logic.fol.structure.Literal;
import ai.libs.jaicore.logic.fol.structure.LiteralParam;
import ai.libs.jaicore.logic.fol.structure.Monom;
import ai.libs.jaicore.logic.fol.structure.VariableParam;
import ai.libs.jaicore.logic.fol.theories.EvaluablePredicate;
import ai.libs.jaicore.planning.classical.problems.ceoc.CEOCOperation;
import ai.libs.jaicore.planning.core.interfaces.IEvaluatedPlan;
import ai.libs.jaicore.planning.core.interfaces.IPlan;
import ai.libs.jaicore.planning.hierarchical.problems.ceocipstn.CEOCIPSTNPlanningDomain;
import ai.libs.jaicore.planning.hierarchical.problems.ceocipstn.CEOCIPSTNPlanningProblem;
import ai.libs.jaicore.planning.hierarchical.problems.ceocipstn.OCIPMethod;
import ai.libs.jaicore.planning.hierarchical.problems.htn.CostSensitiveHTNPlanningProblem;
import ai.libs.jaicore.planning.hierarchical.problems.stn.TaskNetwork;
/**
* This is the class that conducts the actual problem reduction of software configuration to HTN Planning
*
* @author fmohr
*
*/
public class HASCOReduction<V extends Comparable<V>>
implements AlgorithmicProblemReduction<RefinementConfiguredSoftwareConfigurationProblem<V>, ComponentInstance, CostSensitiveHTNPlanningProblem<CEOCIPSTNPlanningProblem, V>, IEvaluatedPlan<V>> {
private static final boolean CONFIGURE_PARAMS = true; // this could be determined automatically later
private static final Map<CNFFormula, Monom> EMPTY_PRECONDITION = new HashMap<>();
// component selection
private static final String RESOLVE_COMPONENT_IFACE_PREFIX = "1_tResolve";
private static final String RESOLVE_IFACE_GROUP_PREFIX = "1_tResolveGroup";
private static final String RESOLVE_COMPONENT_IFACE_OPTIONAL_PREFIX = "1_tResolveOpt";
private static final String RESOLVE_SINGLE = "1_tResolveSingle";
private static final String RESOLVE_SINGLE_OPTIONAL = "1_tResolveSingleOptional";
private static final String SATISFY_PREFIX = "1_satisfy";
// component configuration
private static final String REFINE_PARAMETERS_PREFIX = "2_tRefineParamsOf";
private static final String REFINE_PARAMETER_PREFIX = "2_tRefineParam";
private static final String DECLARE_CLOSED_PREFIX = "2_declareClosed";
private static final String REDEF_VALUE_PREFIX = "2_redefValue";
private static final String OMIT_RESOLUTION_PREFIX = "1_omitResolution";
private static final String COMPONENT_OF_C2 = "component(c2)";
private RefinementConfiguredSoftwareConfigurationProblem<V> originalProblem;
/* working variables */
private Collection<IComponent> components;
private INumericParameterRefinementConfigurationMap paramRefinementConfig;
public static Monom getInitState() {
return new Monom("component('request')");
}
public static List<CEOCOperation> getOperations(final Collection<? extends IComponent> components, final INumericParameterRefinementConfigurationMap paramRefinementConfig) {
List<CEOCOperation> operations = new ArrayList<>();
for (IComponent c : components) {
String cName = c.getName();
for (String i : c.getProvidedInterfaces()) {
List<VariableParam> opParams = new ArrayList<>();
opParams.add(new VariableParam("iGroupHandle")); // handle for the required interface to be resolved here
opParams.add(new VariableParam("iHandle")); // handle for the required interface to be resolved here
opParams.add(new VariableParam("cHandle")); // handle for the new component instance we create
int j = 0;
Map<CNFFormula, Monom> addList = new HashMap<>();
Monom standardKnowledgeAboutNewComponent = new Monom("component(cHandle) & resolves(iHandle, '" + i + "', '" + cName + "', cHandle) & usedin('" + cName + "', iGroupHandle)");
for (IParameter p : c.getParameters()) {
String pName = p.getName();
String paramIdentifier = "p" + (++j);
opParams.add(new VariableParam(paramIdentifier));
/* add the information about this parameter container */
List<LiteralParam> literalParams = new ArrayList<>();
literalParams.add(new ConstantParam(cName));
literalParams.add(new ConstantParam(pName));
literalParams.add(new VariableParam("cHandle"));
literalParams.add(new VariableParam(paramIdentifier));
standardKnowledgeAboutNewComponent.add(new Literal("parameterContainer", literalParams));
/* add knowledge about initial value */
List<LiteralParam> valParams = new ArrayList<>();
valParams.add(new VariableParam(paramIdentifier));
if (p.isNumeric()) {
standardKnowledgeAboutNewComponent.add(new Literal("parameterFocus(cHandle, '" + pName + "', '" + paramRefinementConfig.getRefinement(c, p).getFocusPoint() + "')"));
NumericParameterDomain np = (NumericParameterDomain) p.getDefaultDomain();
valParams.add(new ConstantParam("[" + np.getMin() + "," + np.getMax() + "]"));
} else {
valParams.add(new ConstantParam(p.getDefaultValue().toString()));
}
standardKnowledgeAboutNewComponent.add(new Literal("val", valParams));
}
int r = 0;
for (IRequiredInterfaceDefinition requiredInterface : c.getRequiredInterfaces()) {
String reqIntIdentifier = "r" + (++r);
String requiredInterfaceID = requiredInterface.getId();
opParams.add(new VariableParam(reqIntIdentifier));
List<LiteralParam> literalParams = new ArrayList<>();
literalParams.add(new ConstantParam(cName));
literalParams.add(new ConstantParam(requiredInterfaceID));
literalParams.add(new VariableParam("cHandle"));
literalParams.add(new VariableParam(reqIntIdentifier));
standardKnowledgeAboutNewComponent.add(new Literal("interfaceGroup", literalParams));
}
addList.put(new CNFFormula(), standardKnowledgeAboutNewComponent);
CEOCOperation newOp = new CEOCOperation(SATISFY_PREFIX + i + "With" + cName, opParams, new Monom(), addList, new HashMap<>(), new ArrayList<>());
operations.add(newOp);
}
}
/* operations to require uniqueness of components in group interface */
Map<CNFFormula, Monom> addListUniqueness = new HashMap<>();
addListUniqueness.put(new CNFFormula(), new Monom("uniqueComponents(iGroupHandle)"));
operations.add(new CEOCOperation("1_requireUniqueness", "iGroupHandle", new Monom(), addListUniqueness, EMPTY_PRECONDITION, ""));
/* operations for interface definitions */
Map<CNFFormula, Monom> addList = new HashMap<>();
addList.put(new CNFFormula(), new Monom("interfaceMember(iHandle, iGroupHandle, iIndex)"));
CEOCOperation defInterfaceOp = new CEOCOperation("1_defineInterface", "iGroupHandle, iHandle, iIndex", new Monom(), addList, new HashMap<>(), "");
operations.add(defInterfaceOp);
/* create operations for parameter initialization */
// redefValue(container, previousValue, newValue)
Map<CNFFormula, Monom> redefOpAddList = new HashMap<>();
redefOpAddList.put(new CNFFormula(), new Monom("val(container,newValue) & overwritten(container)"));
Map<CNFFormula, Monom> redefOpDelList = new HashMap<>();
redefOpDelList.put(new CNFFormula(), new Monom("val(container,previousValue)"));
operations.add(new CEOCOperation(REDEF_VALUE_PREFIX, "container,previousValue,newValue", new Monom("val(container,previousValue)"), redefOpAddList, redefOpDelList, ""));
// declareClosed(container)
Map<CNFFormula, Monom> closeOpAddList = new HashMap<>();
closeOpAddList.put(new CNFFormula(), new Monom("closed(container)"));
operations.add(new CEOCOperation(DECLARE_CLOSED_PREFIX, "container", new Monom(), closeOpAddList, new HashMap<>(), ""));
// omitResolution(c1, i , c2)
Map<CNFFormula, Monom> omitResolutionOpAddList = new HashMap<>();
omitResolutionOpAddList.put(new CNFFormula(), new Monom("anyOmitted(iGroupHandle) & omitted(cHandle)"));
operations.add(new CEOCOperation(OMIT_RESOLUTION_PREFIX, "iGroupHandle,iHandle,cHandle", new Monom(), omitResolutionOpAddList, new HashMap<>(), ""));
return operations;
}
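/*
 * Example of a generated operation (component and parameter names are made up): for a component "A" that provides an interface
 * "I" and has a single numeric parameter "x" with default domain [0.0, 1.0], the code above creates an operation named
 * "1_satisfyIWithA" whose add list asserts component(cHandle), resolves(iHandle, 'I', 'A', cHandle), usedin('A', iGroupHandle),
 * parameterContainer('A', 'x', cHandle, p1), a parameterFocus literal for 'x', and val(p1, '[0.0,1.0]').
 */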
public static List<OCIPMethod> getParameterRefinementMethods(final Collection<? extends IComponent> components) {
List<OCIPMethod> methods = new ArrayList<>();
// Non-list interfaces methods
for (IComponent c : components) {
String cName = c.getName();
/* go, in an ordering that is consistent with the pre-order on the params imposed by the dependencies, over the set of params */
if (CONFIGURE_PARAMS) {
/* create methods for choosing/refining parameters */
List<VariableParam> paramRefinementsParams = new ArrayList<>();
paramRefinementsParams.add(new VariableParam("c2"));
List<Literal> networkForRefinements = new ArrayList<>();
StringBuilder refinementArgumentsSB = new StringBuilder();
int j = 0;
for (IParameter p : c.getParameters()) {
String pName = p.getName();
String pIdent = "p" + (++j);
refinementArgumentsSB.append(", " + pIdent);
paramRefinementsParams.add(new VariableParam(pIdent));
networkForRefinements.add(new Literal(REFINE_PARAMETER_PREFIX + pName + "Of" + cName + "(c2, " + pIdent + ")"));
// ignoreParamRefinementFor<p>Of<c>(object, container, curval)
methods.add(getMethodIgnoreParamRefinement(cName, pName));
// refineParam<p>Of<c>(c2, p1, ..., pm)
methods.add(getMethodRefineParam(cName, pName));
}
networkForRefinements.add(new Literal(REFINE_PARAMETERS_PREFIX + cName + "(c2" + refinementArgumentsSB.toString() + ")"));
// refineParamsOf<c>(c2, p1, ..., pm)
methods.add(new OCIPMethod("refineParamsOf" + cName, paramRefinementsParams, new Literal(REFINE_PARAMETERS_PREFIX + cName + "(c2" + refinementArgumentsSB.toString() + ")"), new Monom(COMPONENT_OF_C2),
new TaskNetwork(networkForRefinements), false, new ArrayList<>(), new Monom("!refinementCompleted('" + cName + "', c2)")));
// closeRefinementOfParamsOf<c>(c2, p1, ..., pm)
methods.add(new OCIPMethod("closeRefinementOfParamsOf" + cName, paramRefinementsParams, new Literal(REFINE_PARAMETERS_PREFIX + cName + "(c2" + refinementArgumentsSB.toString() + ")"), new Monom(COMPONENT_OF_C2),
new TaskNetwork(), false, new ArrayList<>(), new Monom("refinementCompleted('" + cName + "', c2)")));
}
}
return methods;
}
public static List<OCIPMethod> getMethodsToResolveInterfaceWithComponent(final Collection<? extends IComponent> components) {
List<OCIPMethod> methods = new ArrayList<>();
// Non-list interfaces methods
for (IComponent c : components) {
String cName = c.getName();
// resolve<i>With<c>(c1; c2, p1, ..., pm, r1, ..., rn)
/* create methods for the refinement of the interfaces offered by this component */
for (String i : c.getProvidedInterfaces()) {
List<VariableParam> inputParams = Arrays.asList(new VariableParam("iGroupHandle"), new VariableParam("iHandle"), new VariableParam("cHandle"));
List<VariableParam> outputParams = new ArrayList<>();
Collection<IRequiredInterfaceDefinition> requiredInterfaces = c.getRequiredInterfaces();
/* create string for the arguments of this operation */
StringBuilder satisfyOpArgumentsSB = new StringBuilder();
if (CONFIGURE_PARAMS) {
for (int j = 1; j <= c.getParameters().size(); j++) {
String paramIdentifier = "p" + j;
satisfyOpArgumentsSB.append(", " + paramIdentifier);
}
}
for (int r = 1; r <= requiredInterfaces.size(); r++) {
satisfyOpArgumentsSB.append(",iSubGroup_" + r);
}
/* configure task network for this method */
List<Literal> network = new ArrayList<>();
network.add(new Literal(SATISFY_PREFIX + i + "With" + cName + "(iGroupHandle, iHandle, cHandle" + satisfyOpArgumentsSB.toString() + ")"));
int r = 0;
for (IRequiredInterfaceDefinition ir : requiredInterfaces) {
outputParams.add(new VariableParam("iSubGroup_" + (r + 1)));
network.add(new Literal(RESOLVE_IFACE_GROUP_PREFIX + ir.getId() + "of" + c.getName() + "(cHandle, iSubGroup_" + (r + 1) + ")"));
r++;
}
/* collect the parameter containers created by the satisfy operation as outputs and pass them on to the parameter refinement task */
StringBuilder refinementArgumentsSB = new StringBuilder();
if (CONFIGURE_PARAMS) {
for (int j = 1; j <= c.getParameters().size(); j++) {
String paramIdentifier = "p" + j;
outputParams.add(new VariableParam(paramIdentifier));
refinementArgumentsSB.append(", " + paramIdentifier);
}
}
network.add(new Literal(REFINE_PARAMETERS_PREFIX + cName + "(cHandle" + refinementArgumentsSB.toString() + ")"));
/* create the outputs of this method and add the method to the collection */
List<VariableParam> methodParams = new ArrayList<>();
methodParams.addAll(inputParams);
methodParams.addAll(outputParams);
methods.add(new OCIPMethod("resolve" + i + "With" + cName, methodParams, new Literal(RESOLVE_SINGLE + i + "(iGroupHandle, iHandle, cHandle)"), new Monom("!uniqueComponents(iGroupHandle)"), new TaskNetwork(network), false,
outputParams, new Monom()));
methods.add(new OCIPMethod("resolveUniquely" + i + "With" + cName, methodParams, new Literal(RESOLVE_SINGLE + i + "(iGroupHandle, iHandle, cHandle)"),
new Monom("uniqueComponents(iGroupHandle) & !usedin('" + c.getName() + "', iGroupHandle)"), new TaskNetwork(network), false, outputParams, new Monom()));
}
}
return methods;
}
public static List<OCIPMethod> getMethodsToResolveInterfaceGroup(final Collection<? extends IComponent> components) {
List<OCIPMethod> methods = new ArrayList<>();
// Non-list interfaces methods
for (IComponent c : components) {
for (IRequiredInterfaceDefinition ri : c.getRequiredInterfaces()) {
List<VariableParam> methodInputs = Arrays.asList(new VariableParam("cHandle"), new VariableParam("iGroupHandle"));
List<VariableParam> methodOutputs = new ArrayList<>();
List<Literal> network = new ArrayList<>();
if (ri.isUniqueComponents()) {
network.add(new Literal("1_requireUniqueness(iGroupHandle)"));
}
for (int j = 1; j <= ri.getMax(); j++) {
methodOutputs.add(new VariableParam("ri_" + j));
methodOutputs.add(new VariableParam("cHandle_" + j));
network.add(new Literal("1_defineInterface(iGroupHandle, ri_" + j + ", '" + j + "')"));
}
// Tasks: tResolveSingle<i>(c1, c2_1)... tResolveSingle<i>(c1, c2_<min(I)>)
for (int j = 1; j <= ri.getMin(); j++) {
network.add(new Literal(RESOLVE_SINGLE + ri.getName() + "(iGroupHandle, ri_" + j + ", cHandle_" + j + ")"));
}
// Tasks: tResolveSingleOptional<i>(c1, c2_<min(I)+1>)... tResolveSingleOptional<i>(c1, c2_<max(I)>)
for (int j = ri.getMin() + 1; j <= ri.getMax(); j++) {
network.add(new Literal(RESOLVE_SINGLE_OPTIONAL + ri.getName() + "(iGroupHandle, ri_" + j + ", cHandle_" + j + ")"));
}
List<VariableParam> methodParams = new ArrayList<>();
methodParams.addAll(methodInputs);
methodParams.addAll(methodOutputs);
methods.add(new OCIPMethod("resolveGroup" + ri.getId() + "of" + c.getName(), methodParams, new Literal(RESOLVE_IFACE_GROUP_PREFIX + ri.getId() + "of" + c.getName() + "(cHandle, iGroupHandle)"), new Monom(),
new TaskNetwork(network), false, methodOutputs, new Monom()));
if (ri.isOptional()) {
methods.add(new OCIPMethod("ignoreGroup" + ri.getId() + "of" + c.getName(), methodParams, new Literal(RESOLVE_IFACE_GROUP_PREFIX + ri.getId() + "of" + c.getName() + "(cHandle, iGroupHandle)"), new Monom(),
new TaskNetwork(), false, methodOutputs, new Monom()));
}
}
}
return methods;
}
public static List<OCIPMethod> getInterfaceResolutionMethods(final Collection<? extends IComponent> components) {
List<OCIPMethod> methods = new ArrayList<>();
methods.addAll(getMethodsToResolveInterfaceWithComponent(components));
methods.addAll(getMethodsToResolveInterfaceGroup(components));
/* get list of all names of any interface */
Set<String> interfaceNames = new HashSet<>();
components.forEach(c -> c.getRequiredInterfaces().forEach(ri -> interfaceNames.add(ri.getName())));
for (String iName : interfaceNames.stream().sorted().collect(Collectors.toList())) {
List<VariableParam> methodParams = new ArrayList<>();
List<Literal> network = new ArrayList<>();
List<VariableParam> methodOutputs = new ArrayList<>();
// <<=| doResolve<i>(c1, c2) |=>>
methodParams.add(new VariableParam("iGroupHandle"));
methodParams.add(new VariableParam("iHandle"));
methodParams.add(new VariableParam("cHandle"));
network.add(new Literal(RESOLVE_SINGLE + iName + "(iGroupHandle, iHandle, cHandle)"));
String condition = "!anyOmitted(iGroupHandle)";
methods.add(new OCIPMethod("doResolve" + iName, methodParams, new Literal(RESOLVE_SINGLE_OPTIONAL + iName + "(iGroupHandle, iHandle, cHandle)"), new Monom(condition), new TaskNetwork(network), false,
methodOutputs, new Monom()));
network = new ArrayList<>();
// <<=| doNotResolve<i>(c1, c2) |=>>
network.add(new Literal(OMIT_RESOLUTION_PREFIX + "(iGroupHandle, iHandle, cHandle)"));
methods.add(new OCIPMethod("doNotResolve" + iName, methodParams, new Literal(RESOLVE_SINGLE_OPTIONAL + iName + "(iGroupHandle, iHandle, cHandle)"), new Monom(), new TaskNetwork(network), false, methodOutputs,
new Monom()));
}
return methods;
}
public static List<OCIPMethod> getMethods(final Collection<? extends IComponent> components) {
List<OCIPMethod> methods = new ArrayList<>();
methods.addAll(getInterfaceResolutionMethods(components));
methods.addAll(getParameterRefinementMethods(components));
return methods;
}
public static OCIPMethod getMethodIgnoreParamRefinement(final String cName, final String pName) {
return new OCIPMethod("ignoreParamRefinementFor" + pName + "Of" + cName, "object, container, curval", new Literal(REFINE_PARAMETER_PREFIX + pName + "Of" + cName + "(object,container)"),
new Monom("parameterContainer('" + cName + "', '" + pName + "', object, container) & val(container,curval) & overwritten(container)"), new TaskNetwork(DECLARE_CLOSED_PREFIX + "(container)"), false, "",
new Monom("notRefinable('" + cName + "', object, '" + pName + "', container, curval)"));
}
public static OCIPMethod getMethodRefineParam(final String cName, final String pName) {
return new OCIPMethod("refineParam" + pName + "Of" + cName, "object, container, curval, newval", new Literal(REFINE_PARAMETER_PREFIX + pName + "Of" + cName + "(object,container)"),
new Monom("parameterContainer('" + cName + "', '" + pName + "', object, container) & val(container,curval)"), new TaskNetwork(REDEF_VALUE_PREFIX + "(container,curval,newval)"), false, "",
new Monom("isValidParameterRangeRefinement('" + cName + "', object, '" + pName + "', container, curval, newval)"));
}
public CEOCIPSTNPlanningDomain getPlanningDomain() {
return new CEOCIPSTNPlanningDomain(getOperations(this.components, this.paramRefinementConfig), getMethods(this.components));
}
public CEOCIPSTNPlanningProblem getPlanningProblem(final CEOCIPSTNPlanningDomain domain, final CNFFormula knowledge, final Monom init) {
Map<String, EvaluablePredicate> evaluablePredicates = new HashMap<>();
evaluablePredicates.put("isValidParameterRangeRefinement", new IsValidParameterRangeRefinementPredicate(this.components, this.paramRefinementConfig));
evaluablePredicates.put("notRefinable", new IsNotRefinablePredicate(this.components, this.paramRefinementConfig));
evaluablePredicates.put("refinementCompleted", new IsRefinementCompletedPredicate(this.components, this.paramRefinementConfig));
return new CEOCIPSTNPlanningProblem(domain, knowledge, init, new TaskNetwork(RESOLVE_SINGLE + this.originalProblem.getRequiredInterface() + "('rGroup', 'request', 'solution')"), evaluablePredicates, new HashMap<>());
}
public CEOCIPSTNPlanningProblem getPlanningProblem() {
return this.getPlanningProblem(this.getPlanningDomain(), new CNFFormula(), getInitState());
}
/**
* This method is a utility for anyone who wants to work on the graph obtained from HASCO's reduction without using HASCO's search logic.
*
* @param transformer the planning reduction whose encoding is used to derive the graph generator
* @return the graph generator of the encoded planning problem
*/
public <T, A> IGraphGenerator<T, A> getGraphGeneratorUsedByHASCOForSpecificPlanner(final IHASCOPlanningReduction<T, A> transformer) {
return transformer.encodeProblem(this.getPlanningProblem()).getGraphGenerator();
}
@Override
public CostSensitiveHTNPlanningProblem<CEOCIPSTNPlanningProblem, V> encodeProblem(final RefinementConfiguredSoftwareConfigurationProblem<V> problem) {
if (problem.getRequiredInterface() == null) {
throw new IllegalArgumentException("No required interface defined in the problem!");
}
/* set object variables that will be important for several methods in the reduction */
this.originalProblem = problem;
this.components = this.originalProblem.getComponents();
this.paramRefinementConfig = this.originalProblem.getParamRefinementConfig();
/* build the cost insensitive planning problem */
CEOCIPSTNPlanningProblem planningProblem = this.getPlanningProblem();
/* derive a plan evaluator from the configuration evaluator */
return new CostSensitiveHTNPlanningProblem<>(planningProblem, new HASCOReductionSolutionEvaluator<>(problem, this));
}
@Override
public ComponentInstance decodeSolution(final IEvaluatedPlan<V> solution) {
return this.decodeSolution((IPlan) solution);
}
public ComponentInstance decodeSolution(final IPlan plan) {
return HASCOUtil.getSolutionCompositionForPlan(HASCOReduction.this.components, getInitState(), plan, true);
}
}
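/*
* Usage sketch (illustrative addition, not part of the original source): assuming a
* RefinementConfiguredSoftwareConfigurationProblem<Double> "problem" and a corresponding
* HASCOReduction<Double> "reduction" are available, the reduction turns the configuration
* problem into an HTN planning problem and translates plans back into component instances:
*
*   CostSensitiveHTNPlanningProblem<CEOCIPSTNPlanningProblem, Double> planningProblem = reduction.encodeProblem(problem);
*   // ... run an HTN planner of choice on planningProblem to obtain a plan ...
*   ComponentInstance composedSolution = reduction.decodeSolution(plan);
*/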
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core/reduction
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/core/reduction/softcomp2planning/HASCOReductionSolutionEvaluator.java
|
package ai.libs.hasco.core.reduction.softcomp2planning;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.Collectors;
import org.api4.java.common.attributedobjects.IObjectEvaluator;
import org.api4.java.common.attributedobjects.ObjectEvaluationFailedException;
import org.api4.java.common.control.ILoggingCustomizable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ai.libs.jaicore.components.api.IComponentInstance;
import ai.libs.jaicore.components.model.ComponentInstance;
import ai.libs.jaicore.components.model.RefinementConfiguredSoftwareConfigurationProblem;
import ai.libs.jaicore.logging.ToJSONStringUtil;
import ai.libs.jaicore.planning.core.Action;
import ai.libs.jaicore.planning.core.interfaces.IPlan;
import ai.libs.jaicore.timing.TimeRecordingObjectEvaluator;
public class HASCOReductionSolutionEvaluator<V extends Comparable<V>> implements IObjectEvaluator<IPlan, V>, ILoggingCustomizable {
private Logger logger = LoggerFactory.getLogger(HASCOReductionSolutionEvaluator.class);
private final RefinementConfiguredSoftwareConfigurationProblem<V> configurationProblem;
private final HASCOReduction<V> reduction;
private final IObjectEvaluator<IComponentInstance, V> evaluator;
private final TimeRecordingObjectEvaluator<IComponentInstance, V> timedEvaluator;
public HASCOReductionSolutionEvaluator(final RefinementConfiguredSoftwareConfigurationProblem<V> configurationProblem, final HASCOReduction<V> reduction) {
super();
this.configurationProblem = configurationProblem;
this.reduction = reduction;
this.evaluator = this.configurationProblem.getCompositionEvaluator();
this.timedEvaluator = new TimeRecordingObjectEvaluator<>(this.evaluator);
}
public HASCOReduction<V> getReduction() {
return this.reduction;
}
@Override
public V evaluate(final IPlan plan) throws InterruptedException, ObjectEvaluationFailedException {
ComponentInstance solution = this.reduction.decodeSolution(plan);
if (solution == null) {
throw new IllegalArgumentException("The following plan yields a null solution: \n\t" + plan.getActions().stream().map(Action::getEncoding).collect(Collectors.joining("\n\t")));
}
this.logger.info("Forwarding evaluation request for CI {} to evaluator {}", solution, this.evaluator.getClass().getName());
return this.timedEvaluator.evaluate(solution);
}
@Override
public String toString() {
Map<String, Object> fields = new HashMap<>();
fields.put("problem", this.configurationProblem);
return ToJSONStringUtil.toJSONString(this.getClass().getSimpleName(), fields);
}
@Override
public String getLoggerName() {
return this.logger.getName();
}
@Override
public void setLoggerName(final String name) {
this.logger = LoggerFactory.getLogger(name);
if (this.evaluator instanceof ILoggingCustomizable) {
this.logger.info("Setting logger of evaluator {} to {}.be", this.evaluator.getClass().getName(), name);
((ILoggingCustomizable) this.evaluator).setLoggerName(name + ".be");
} else {
this.logger.info("Evaluator {} cannot be customized for logging, so not configuring its logger.", this.evaluator.getClass().getName());
}
}
public IObjectEvaluator<IComponentInstance, V> getEvaluator() {
return this.evaluator;
}
public TimeRecordingObjectEvaluator<IComponentInstance, V> getTimedEvaluator() {
return this.timedEvaluator;
}
}
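/*
* Usage sketch (illustrative addition, not part of the original source): the evaluator bridges
* plans and the composition evaluator of the configuration problem; given a plan produced by the
* planner, it decodes the plan via the reduction and forwards the resulting component instance to
* the configured composition evaluator:
*
*   HASCOReductionSolutionEvaluator<Double> planEvaluator = new HASCOReductionSolutionEvaluator<>(configurationProblem, reduction);
*   Double score = planEvaluator.evaluate(plan); // may throw ObjectEvaluationFailedException
*/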
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/gui
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/gui/civiewplugin/CIViewGUIPlugin.java
|
package ai.libs.hasco.gui.civiewplugin;
import java.util.Collection;
import ai.libs.jaicore.components.api.IComponent;
import ai.libs.jaicore.graphvisualizer.plugin.nodeinfo.NodeInfoGUIPlugin;
public class CIViewGUIPlugin extends NodeInfoGUIPlugin {
public CIViewGUIPlugin(String title, Collection<? extends IComponent> components) {
super(title, new TFDNodeAsCIViewInfoGenerator(components));
}
public CIViewGUIPlugin(Collection<? extends IComponent> components) {
this("CI View", components);
}
}
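/*
* Usage sketch (illustrative addition, not part of the original source): the plug-in only needs
* the component repository and can then be registered with a graph visualization window like any
* other NodeInfoGUIPlugin (the window API itself is not part of this file):
*
*   CIViewGUIPlugin ciView = new CIViewGUIPlugin("CI View", components);
*/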
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/gui
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/gui/civiewplugin/TFDNodeAsCIViewInfoGenerator.java
|
package ai.libs.hasco.gui.civiewplugin;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map.Entry;
import ai.libs.hasco.core.HASCOUtil;
import ai.libs.jaicore.components.api.IComponent;
import ai.libs.jaicore.components.api.IComponentInstance;
import ai.libs.jaicore.components.api.IParameter;
import ai.libs.jaicore.components.model.ComponentInstance;
import ai.libs.jaicore.graphvisualizer.plugin.nodeinfo.NodeInfoGenerator;
import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode;
import ai.libs.jaicore.search.model.travesaltree.BackPointerPath;
/**
* This info generator is meant to be used in combination with the node info plug-in.
*
* @author wever
*/
public class TFDNodeAsCIViewInfoGenerator implements NodeInfoGenerator<BackPointerPath<TFDNode, String, Double>> {
private Collection<IComponent> components;
public TFDNodeAsCIViewInfoGenerator(final Collection<? extends IComponent> components) {
this.components = new ArrayList<>(components);
}
@Override
public String generateInfoForNode(final BackPointerPath<TFDNode, String, Double> node) {
ComponentInstance ci = HASCOUtil.getSolutionCompositionFromState(this.components, node.getHead().getState(), true);
if (ci == null) {
return "<i>No component has been chosen, yet.</i>";
} else {
return this.visualizeComponentInstance(ci);
}
}
private String visualizeComponentInstance(final IComponentInstance ci) {
StringBuilder sb = new StringBuilder();
sb.append("<div style=\"border: 1px solid #333; padding: 10px; font-family: Arial, non-serif;\">");
/* add the name of the component */
sb.append("<div style=\"text-align: center;font-size: 18px; font-weight: bold;\">" + ci.getComponent().getName() + "</div>");
sb.append("<table style=\"width: 100%;\">");
sb.append("<tr style=\"background: #e0e0e0;\"><th>Parameter</th><th>Value</th></tr>");
int i = 0;
for (IParameter parameter : ci.getComponent().getParameters()) {
if (i % 2 == 0) {
sb.append("<tr style=\"background: #f2f2f2;\">");
} else {
sb.append("<tr style=\"background: #efefef;\">");
}
sb.append("<td>" + parameter.getName() + "</td>");
sb.append("<td>" + (ci.getParameterValues().containsKey(parameter.getName()) ? ci.getParameterValue(parameter) : "not yet set") + "</td>");
sb.append("</tr>");
i++;
}
sb.append("</table>");
for (Entry<String, List<IComponentInstance>> subComponent : ci.getSatisfactionOfRequiredInterfaces().entrySet()) {
sb.append(subComponent.getKey());
subComponent.getValue().forEach(subCi -> sb.append(this.visualizeComponentInstance(subCi)));
}
sb.append("</div>");
return sb.toString();
}
@Override
public String getName() {
return TFDNodeAsCIViewInfoGenerator.class.getName();
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/gui
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/gui/statsplugin/ComponentInstanceSerializer.java
|
package ai.libs.hasco.gui.statsplugin;
import java.io.IOException;
import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import ai.libs.jaicore.components.api.IComponentInstance;
import ai.libs.jaicore.components.model.ComponentInstance;
public class ComponentInstanceSerializer {
private ObjectMapper objectMapper;
public ComponentInstanceSerializer() {
this.initializeObjectMapper();
}
public String serializeComponentInstance(final IComponentInstance componentInstance) throws JsonProcessingException {
return this.objectMapper.writeValueAsString(componentInstance);
}
public ComponentInstance deserializeComponentInstance(final String serializedComponentInstance) throws IOException {
return this.objectMapper.readValue(serializedComponentInstance, ComponentInstance.class);
}
private void initializeObjectMapper() {
this.objectMapper = new ObjectMapper();
this.objectMapper.setVisibility(PropertyAccessor.ALL, Visibility.NONE);
this.objectMapper.setVisibility(PropertyAccessor.FIELD, Visibility.ANY);
// make sure that the object mapper stores type information when serializing objects
this.objectMapper.enableDefaultTyping();
}
}
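/*
* Usage sketch (illustrative addition, not part of the original source): serialization and
* deserialization are inverse operations, so a component instance can be round-tripped through
* its JSON representation:
*
*   ComponentInstanceSerializer serializer = new ComponentInstanceSerializer();
*   String json = serializer.serializeComponentInstance(componentInstance);
*   ComponentInstance restored = serializer.deserializeComponentInstance(json);
*/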
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/gui
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/gui/statsplugin/HASCOModelStatisticsComponentSelector.java
|
package ai.libs.hasco.gui.statsplugin;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ai.libs.jaicore.basic.sets.Pair;
import ai.libs.jaicore.components.api.IRequiredInterfaceDefinition;
import ai.libs.jaicore.components.model.ComponentInstance;
import ai.libs.jaicore.components.model.UnparametrizedComponentInstance;
import ai.libs.jaicore.graphvisualizer.plugin.solutionperformanceplotter.ScoredSolutionCandidateInfo;
import javafx.application.Platform;
import javafx.collections.ObservableList;
import javafx.scene.control.ComboBox;
import javafx.scene.control.TreeItem;
/**
* @author fmohr
*
* This class represents a logical entry of the tree.
*
* It holds a listener for its combo box that updates the sub-tree and the histogram if a filter is set.
*/
public class HASCOModelStatisticsComponentSelector extends TreeItem<HASCOModelStatisticsComponentSelector> {
private static final Logger logger = LoggerFactory.getLogger(HASCOModelStatisticsComponentSelector.class);
private final HASCOModelStatisticsComponentSelector parent;
private final String requiredInterface;
private final ComboBox<String> componentSelector;
private final HASCOModelStatisticsPluginModel model;
public HASCOModelStatisticsComponentSelector(final HASCOModelStatisticsPluginView rootView, final HASCOModelStatisticsPluginModel model) {
this(rootView, null, null, model);
}
public HASCOModelStatisticsComponentSelector(final HASCOModelStatisticsPluginView rootView, final HASCOModelStatisticsComponentSelector parent, final String requiredInterface, final HASCOModelStatisticsPluginModel model) {
this.parent = parent;
this.requiredInterface = requiredInterface;
this.model = model;
this.componentSelector = new ComboBox<>();
this.componentSelector.getItems().add("*");
this.componentSelector.setValue("*");
this.componentSelector.valueProperty().addListener((observable, oldValue, newValue) -> {
HASCOModelStatisticsComponentSelector.this.getChildren().clear();
if (!newValue.equals("*")) {
Collection<IRequiredInterfaceDefinition> requiredInterfacesOfThisChoice = model.getKnownComponents().get(newValue).getRequiredInterfaces();
for (IRequiredInterfaceDefinition reqInterface : requiredInterfacesOfThisChoice) {
String requiredInterfaceId = reqInterface.getId();
HASCOModelStatisticsComponentSelector.this.getChildren().add(new HASCOModelStatisticsComponentSelector(rootView, HASCOModelStatisticsComponentSelector.this, requiredInterfaceId, model));
}
}
rootView.updateHistogram();
});
this.update();
this.setValue(this); // set the value to itself. This is necessary so that the cell factory really retrieves this object as the node
this.setExpanded(true);
}
/**
* This recursively updates the whole tree view under this node with respect to the current selections.
*
* This method is currently not too efficient, because it always iterates over all solutions, but it is still fast enough.
*/
public void update() {
long start = System.currentTimeMillis();
List<Pair<String, String>> selectionPath = this.getSelectionsOnPathToRoot();
List<String> reqInterfacePath = selectionPath.stream().map(Pair::getX).collect(Collectors.toList());
reqInterfacePath.remove(0); // this is null and only needed as a selector in the selectionPath
ObservableList<String> items = this.componentSelector.getItems();
for (ScoredSolutionCandidateInfo scoredSolutionCandidateInfo : this.model.getAllSeenSolutionCandidateFoundInfosUnordered()) {
ComponentInstance ci = this.model.deserializeComponentInstance(scoredSolutionCandidateInfo.getSolutionCandidateRepresentation());
/* determine sub-component relevant for this path and add the respective component lexicographically correctly (unless it is already in the list) */
UnparametrizedComponentInstance uci = new UnparametrizedComponentInstance(ci).getSubComposition(reqInterfacePath);
if (this.componentSelector.getItems().contains(uci.getComponentName())) {
continue;
}
logger.trace("Relevant UCI of {} for path {} is {}", ci, reqInterfacePath, uci);
int n = items.size();
String nameOfNewComponent = uci.getComponentName();
for (int i = 0; i <= n; i++) {
if (i == n || items.get(i).compareTo(nameOfNewComponent) >= 0) {
final int index = i;
Platform.runLater(() -> items.add(index, nameOfNewComponent));
break;
}
}
}
this.getChildren().forEach(ti -> ti.getValue().update());
long duration = System.currentTimeMillis() - start;
logger.debug("Update of {} took {}ms", this, duration);
}
/**
* Resets the combo box to the wild-card and removes all child nodes.
*/
public void clear() {
this.componentSelector.getItems().removeIf(s -> !s.equals("*"));
this.getChildren().clear();
}
/**
* Gets the choices made in the combo boxes on the path from the root to here. The first entry has a null-key just saying what the choice for the root component has been.
*
* @return List of choices.
*/
public List<Pair<String, String>> getSelectionsOnPathToRoot() {
List<Pair<String, String>> path = this.parent != null ? this.parent.getSelectionsOnPathToRoot() : new ArrayList<>();
path.add(new Pair<>(this.requiredInterface, this.componentSelector.getValue()));
return path;
}
/**
* Determines the set of all selection paths from here to any leaf. For the root node, this is the set of constraints specified in the combo boxes.
*
* @return Collection of paths to leaves.
*/
public Collection<List<Pair<String, String>>> getAllSelectionsOnPathToAnyLeaf() {
Collection<List<Pair<String, String>>> subPaths = new ArrayList<>();
if (this.getChildren().isEmpty()) {
List<Pair<String, String>> leafRestriction = new ArrayList<>();
leafRestriction.add(new Pair<>(this.requiredInterface, this.componentSelector.getValue()));
subPaths.add(leafRestriction);
return subPaths;
}
for (TreeItem<HASCOModelStatisticsComponentSelector> child : this.getChildren()) {
subPaths.addAll(child.getValue().getAllSelectionsOnPathToAnyLeaf());
}
return subPaths.stream().map(p -> {
p.add(0, new Pair<>(this.requiredInterface, this.componentSelector.getValue()));
return p;
}).collect(Collectors.toList());
}
public String getRequiredInterface() {
return this.requiredInterface;
}
public ComboBox<String> getComponentSelector() {
return this.componentSelector;
}
@Override
public String toString() {
return "HASCOModelStatisticsComponentSelector";
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/gui
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/gui/statsplugin/HASCOModelStatisticsPlugin.java
|
package ai.libs.hasco.gui.statsplugin;
import java.util.Arrays;
import java.util.Collection;
import ai.libs.jaicore.graphvisualizer.events.recorder.property.AlgorithmEventPropertyComputer;
import ai.libs.jaicore.graphvisualizer.plugin.ASimpleMVCPlugin;
import ai.libs.jaicore.graphvisualizer.plugin.solutionperformanceplotter.ScoredSolutionCandidateInfoAlgorithmEventPropertyComputer;
import ai.libs.jaicore.search.gui.plugins.rollouthistograms.RolloutInfoAlgorithmEventPropertyComputer;
/**
*
* @author fmohr
*
* The main class of this plugin. Add instances of this plugin to the visualization window.
*/
public class HASCOModelStatisticsPlugin extends ASimpleMVCPlugin<HASCOModelStatisticsPluginModel, HASCOModelStatisticsPluginView, HASCOModelStatisticsPluginController> {
public HASCOModelStatisticsPlugin() {
this("HASCO Model Statistics");
}
public HASCOModelStatisticsPlugin(final String title) {
super(title);
}
@Override
public Collection<AlgorithmEventPropertyComputer> getPropertyComputers() {
return Arrays.asList(new RolloutInfoAlgorithmEventPropertyComputer(), new ScoredSolutionCandidateInfoAlgorithmEventPropertyComputer(new HASCOSolutionCandidateRepresenter()));
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/gui
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/gui/statsplugin/HASCOModelStatisticsPluginController.java
|
package ai.libs.hasco.gui.statsplugin;
import org.api4.java.algorithm.events.serializable.IPropertyProcessedAlgorithmEvent;
import ai.libs.hasco.core.events.HASCOSolutionEvent;
import ai.libs.jaicore.graphvisualizer.plugin.ASimpleMVCPluginController;
import ai.libs.jaicore.graphvisualizer.plugin.solutionperformanceplotter.ScoredSolutionCandidateInfo;
import ai.libs.jaicore.graphvisualizer.plugin.solutionperformanceplotter.ScoredSolutionCandidateInfoAlgorithmEventPropertyComputer;
/**
*
* @author fmohr
*
* The controller of the HASCOModelStatisticsPlugin
*
*/
public class HASCOModelStatisticsPluginController extends ASimpleMVCPluginController<HASCOModelStatisticsPluginModel, HASCOModelStatisticsPluginView> {
public HASCOModelStatisticsPluginController(final HASCOModelStatisticsPluginModel model, final HASCOModelStatisticsPluginView view) {
super(model, view);
}
@Override
protected void handleAlgorithmEventInternally(final IPropertyProcessedAlgorithmEvent algorithmEvent) {
if (algorithmEvent.correspondsToEventOfClass(HASCOSolutionEvent.class)) {
Object rawScoredSolutionCandidateInfo = algorithmEvent.getProperty(ScoredSolutionCandidateInfoAlgorithmEventPropertyComputer.SCORED_SOLUTION_CANDIDATE_INFO_PROPERTY_NAME);
if (rawScoredSolutionCandidateInfo != null) {
ScoredSolutionCandidateInfo scoredSolutionCandidateInfo = (ScoredSolutionCandidateInfo) rawScoredSolutionCandidateInfo;
this.getModel().addEntry(scoredSolutionCandidateInfo);
}
}
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/gui
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/gui/statsplugin/HASCOModelStatisticsPluginModel.java
|
package ai.libs.hasco.gui.statsplugin;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ai.libs.jaicore.components.api.IComponent;
import ai.libs.jaicore.components.api.IComponentInstance;
import ai.libs.jaicore.components.model.ComponentInstance;
import ai.libs.jaicore.components.model.ComponentInstanceUtil;
import ai.libs.jaicore.components.model.UnparametrizedComponentInstance;
import ai.libs.jaicore.graphvisualizer.plugin.ASimpleMVCPluginModel;
import ai.libs.jaicore.graphvisualizer.plugin.solutionperformanceplotter.ScoredSolutionCandidateInfo;
/**
*
* @author fmohr
*
* Holds all the information to supply the HASCOModelStatisticsPluginView with what it needs.
*/
public class HASCOModelStatisticsPluginModel extends ASimpleMVCPluginModel<HASCOModelStatisticsPluginView, HASCOModelStatisticsPluginController> {
private static final Logger LOGGER = LoggerFactory.getLogger(HASCOModelStatisticsPluginModel.class);
private ComponentInstanceSerializer componentInstanceSerializer = new ComponentInstanceSerializer();
private final Map<UnparametrizedComponentInstance, List<ScoredSolutionCandidateInfo>> observedSolutionsGroupedModuloParameters = new HashMap<>();
private final Map<String, IComponent> knownComponents = new HashMap<>();
/**
* Informs the plugin about a new HASCOSolution. This solution will be considered in the combo boxes as well as in the histogram.
*
* @param scoredSolutionCandidateInfo the scored solution candidate to be added to the statistics
*/
public final void addEntry(final ScoredSolutionCandidateInfo scoredSolutionCandidateInfo) {
IComponentInstance ci = this.deserializeComponentInstance(scoredSolutionCandidateInfo.getSolutionCandidateRepresentation());
if (ci == null) {
return;
}
UnparametrizedComponentInstance uci = new UnparametrizedComponentInstance(ci);
if (!this.observedSolutionsGroupedModuloParameters.containsKey(uci)) {
this.observedSolutionsGroupedModuloParameters.put(uci, new ArrayList<>());
}
this.observedSolutionsGroupedModuloParameters.get(uci).add(scoredSolutionCandidateInfo);
ComponentInstanceUtil.getContainedComponents(ci).forEach(c -> {
if (!this.knownComponents.containsKey(c.getName())) {
this.knownComponents.put(c.getName(), c);
}
});
this.getView().update();
}
/**
* Gets an (unordered) collection of the solutions received so far.
*
* @return Collection of solutions.
*/
public Collection<ScoredSolutionCandidateInfo> getAllSeenSolutionCandidateFoundInfosUnordered() {
List<ScoredSolutionCandidateInfo> solutionEvents = new ArrayList<>();
this.observedSolutionsGroupedModuloParameters.values().forEach(solutionEvents::addAll);
return solutionEvents;
}
/**
* @return A map that assigns, for each known component, its name to the Component object.
*/
public Map<String, IComponent> getKnownComponents() {
return this.knownComponents;
}
/**
* @param composition the unparametrized composition for which score statistics are requested
* @return descriptive statistics over the scores observed for solutions of this composition
*/
public DescriptiveStatistics getPerformanceStatisticsForComposition(final UnparametrizedComponentInstance composition) {
DescriptiveStatistics stats = new DescriptiveStatistics();
this.observedSolutionsGroupedModuloParameters.get(composition).forEach(e -> stats.addValue(this.parseScoreToDouble(e.getScore())));
return stats;
}
/**
* Clears the model (and subsequently the view)
*/
@Override
public void clear() {
this.observedSolutionsGroupedModuloParameters.clear();
this.knownComponents.clear();
this.getView().clear();
}
public ComponentInstance deserializeComponentInstance(final String serializedComponentInstance) {
try {
return this.componentInstanceSerializer.deserializeComponentInstance(serializedComponentInstance);
} catch (IOException e) {
LOGGER.warn("Cannot deserialize component instance {}.", serializedComponentInstance, e);
}
return null;
}
public double parseScoreToDouble(final String score) {
return Double.parseDouble(score);
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/gui
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/gui/statsplugin/HASCOModelStatisticsPluginView.java
|
package ai.libs.hasco.gui.statsplugin;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
import ai.libs.jaicore.basic.sets.Pair;
import ai.libs.jaicore.components.model.ComponentInstanceUtil;
import ai.libs.jaicore.graphvisualizer.events.gui.Histogram;
import ai.libs.jaicore.graphvisualizer.plugin.ASimpleMVCPluginView;
import ai.libs.jaicore.graphvisualizer.plugin.solutionperformanceplotter.ScoredSolutionCandidateInfo;
import javafx.application.Platform;
import javafx.scene.control.TreeView;
import javafx.scene.layout.VBox;
/**
*
* @author fmohr
*
*/
public class HASCOModelStatisticsPluginView extends ASimpleMVCPluginView<HASCOModelStatisticsPluginModel, HASCOModelStatisticsPluginController, VBox> {
private final HASCOModelStatisticsComponentSelector rootNode; // the root of the TreeView shown at the top
private final Histogram histogram; // the histogram shown on the bottom
public HASCOModelStatisticsPluginView(final HASCOModelStatisticsPluginModel model) {
this(model, 100);
}
public HASCOModelStatisticsPluginView(final HASCOModelStatisticsPluginModel model, final int n) {
super(model, new VBox());
this.rootNode = new HASCOModelStatisticsComponentSelector(this, model);
TreeView<HASCOModelStatisticsComponentSelector> treeView = new TreeView<>();
treeView.setCellFactory(HASCOModelStatisticsComponentCell::new);
treeView.setRoot(this.rootNode);
this.getNode().getChildren().add(treeView);
this.histogram = new Histogram(n);
this.histogram.setTitle("Performances observed on the filtered solutions");
this.getNode().getChildren().add(this.histogram);
}
@Override
public void update() {
this.rootNode.update();
this.updateHistogram();
}
/**
* Updates the histogram at the bottom. This is called in both the update method of the general view as well as in the change listener of the combo boxes.
*/
public void updateHistogram() {
Collection<List<Pair<String, String>>> activeFilters = this.rootNode.getAllSelectionsOnPathToAnyLeaf();
List<ScoredSolutionCandidateInfo> activeSolutions = this.getModel().getAllSeenSolutionCandidateFoundInfosUnordered().stream()
.filter(i -> ComponentInstanceUtil.matchesPathRestrictions(this.getModel().deserializeComponentInstance(i.getSolutionCandidateRepresentation()), activeFilters)).collect(Collectors.toList());
DescriptiveStatistics stats = new DescriptiveStatistics();
activeSolutions.forEach(s -> stats.addValue(this.getModel().parseScoreToDouble(s.getScore())));
Platform.runLater(() -> this.histogram.update(stats));
}
@Override
public void clear() {
this.rootNode.clear();
this.histogram.clear();
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/metamining/IMetaMiner.java
|
package ai.libs.hasco.metamining;
import ai.libs.jaicore.components.model.ComponentInstance;
/**
* Used to compute a score for a given {@link ComponentInstance} based on
* meta features of the ComponentInstance and possibly also its application
* context.
*
* @author Helena Graf
*
*/
public interface IMetaMiner {
/**
* Gives a score to the given {@link ComponentInstance} based on its meta
* features and possibly meta features of the application context as well. The
* score reflects an estimate of the quality of the (partial) solution the
* ComponentInstance represents.
*
* @param componentInstance
* The instance for which an estimate is to be made
* @return The estimated score
*/
public double score(ComponentInstance componentInstance);
}
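/*
* Implementation sketch (illustrative addition, not part of the original source): a trivial meta
* miner could, for instance, prefer component instances with fewer explicitly set parameters; the
* scoring policy below is made up purely for demonstration:
*
*   public class ParameterCountingMetaMiner implements IMetaMiner {
*     @Override
*     public double score(final ComponentInstance componentInstance) {
*       return -componentInstance.getParameterValues().size();
*     }
*   }
*/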
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/metamining/MetaMinerBasedSorter.java
|
package ai.libs.hasco.metamining;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ai.libs.hasco.core.HASCOUtil;
import ai.libs.jaicore.components.api.IComponent;
import ai.libs.jaicore.components.model.ComponentInstance;
import ai.libs.jaicore.logging.LoggerUtil;
import ai.libs.jaicore.planning.hierarchical.algorithms.forwarddecomposition.graphgenerators.tfd.TFDNode;
/**
* A Comparator for {@link TFDNode}s that sorts based on meta information about the underlying {@link ComponentInstance} of the node and possibly application context.
*
* @author Helena Graf
*
*/
public class MetaMinerBasedSorter implements Comparator<TFDNode> {
private Logger logger = LoggerFactory.getLogger(MetaMinerBasedSorter.class);
/**
* Components for the current configuration used to convert TFDNodes to ComponentInstances
*/
private Collection<IComponent> components;
/**
* The "MetaMiner" has access to the meta information of the given {@link ComponentInstance} and possibly its application context. It is used to derive a score of a given ComponentInstance, based on which a comparison of the given
* {@link TFDNode}s is made.
*/
private IMetaMiner metaminer;
public MetaMinerBasedSorter(final IMetaMiner metaminer, final Collection<? extends IComponent> components) {
if (components == null) {
this.logger.warn("No Components in sorter!");
}
this.components = new ArrayList<>(components);
this.metaminer = metaminer;
}
@Override
public int compare(final TFDNode o1, final TFDNode o2) {
if (this.convertToComponentInstance(o1) == null || this.convertToComponentInstance(o2) == null) {
this.logger.warn("Cannot compare pipelines when one is null.");
return 0;
}
if (o1.equals(o2)) {
this.logger.info("Comparing two nodes which are the same.");
return 0;
}
double score1 = this.metaminer.score(this.convertToComponentInstance(o1));
double score2 = this.metaminer.score(this.convertToComponentInstance(o2));
try {
this.logger.trace("Node {} converted to {}", o1, this.convertToComponentInstance(o1).getPrettyPrint());
} catch (IOException e) {
this.logger.error("Logging failed due to {}", LoggerUtil.getExceptionInfo(e));
}
try {
this.logger.trace("Node {} converted to {}", o2, this.convertToComponentInstance(o2).getPrettyPrint());
} catch (IOException e) {
this.logger.error("Logging failed due to {}", LoggerUtil.getExceptionInfo(e));
}
this.logger.debug("Comparing nodes with scores: {} vs {}", score1, score2);
return (int) Math.signum(score1 - score2);
}
/**
* Converts the given TFDNode to a ComponentInstance.
*
* @param node
* The TFDNode to convert
* @return The TFDNode as a ComponentInstance
*/
protected ComponentInstance convertToComponentInstance(final TFDNode node) {
return HASCOUtil.getSolutionCompositionFromState(this.components, node.getState(), false);
}
/**
* Gets the {@link IMetaMiner}, which is used to derive a score for a given {@link TFDNode} based on its attached {@link ComponentInstance}.
*
* @return The meta miner
*/
public IMetaMiner getMetaminer() {
return this.metaminer;
}
/**
* Sets the {@link IMetaMiner}, which is used to derive a score for a given {@link TFDNode} based on its attached {@link ComponentInstance}.
*
* @param metaminer
* The meta miner
*/
public void setMetaminer(final IMetaMiner metaminer) {
this.metaminer = metaminer;
}
}
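/*
* Usage sketch (illustrative addition, not part of the original source): the sorter can be used
* wherever a Comparator<TFDNode> is expected, e.g. to order a list of open search nodes by the
* meta-mined score:
*
*   Comparator<TFDNode> comparator = new MetaMinerBasedSorter(metaMiner, components);
*   listOfNodes.sort(comparator);
*/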
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/observers/HASCOModelStatisticsObserver.java
|
package ai.libs.hasco.observers;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
import com.google.common.eventbus.Subscribe;
import ai.libs.hasco.core.events.HASCOSolutionEvent;
import ai.libs.jaicore.components.model.UnparametrizedComponentInstance;
public class HASCOModelStatisticsObserver {
private final Map<UnparametrizedComponentInstance, List<HASCOSolutionEvent<Double>>> observedSolutionsGroupedByModuloParameters = new HashMap<>();
@Subscribe
public void receiveSolutionEvent(final HASCOSolutionEvent<Double> event) {
UnparametrizedComponentInstance comp = new UnparametrizedComponentInstance(event.getSolutionCandidate().getComponentInstance());
if (!this.observedSolutionsGroupedByModuloParameters.containsKey(comp)) {
this.observedSolutionsGroupedByModuloParameters.put(comp, new ArrayList<>());
}
this.observedSolutionsGroupedByModuloParameters.get(comp).add(event);
}
public Map<UnparametrizedComponentInstance, List<HASCOSolutionEvent<Double>>> getObservedSolutionsGroupedByModuloParameters() {
return this.observedSolutionsGroupedByModuloParameters;
}
public DescriptiveStatistics getPerformanceStatisticsForComposition(final UnparametrizedComponentInstance composition) {
DescriptiveStatistics stats = new DescriptiveStatistics();
this.observedSolutionsGroupedByModuloParameters.get(composition).forEach(e -> stats.addValue(e.getSolutionCandidate().getScore()));
return stats;
}
public Map<UnparametrizedComponentInstance, DescriptiveStatistics> getPerformanceStatisticsPerComposition() {
Map<UnparametrizedComponentInstance, DescriptiveStatistics> statsMap = new HashMap<>();
for (UnparametrizedComponentInstance composition : this.observedSolutionsGroupedByModuloParameters.keySet()) {
statsMap.put(composition, this.getPerformanceStatisticsForComposition(composition));
}
return statsMap;
}
public DescriptiveStatistics getEvaluationTimeStatisticsForComposition(final UnparametrizedComponentInstance composition) {
DescriptiveStatistics stats = new DescriptiveStatistics();
this.observedSolutionsGroupedByModuloParameters.get(composition).forEach(e -> stats.addValue(e.getSolutionCandidate().getTimeToEvaluateCandidate()));
return stats;
}
public Map<UnparametrizedComponentInstance, DescriptiveStatistics> getEvaluationTimeStatisticsPerComposition() {
Map<UnparametrizedComponentInstance, DescriptiveStatistics> statsMap = new HashMap<>();
this.observedSolutionsGroupedByModuloParameters.keySet().forEach(composition -> statsMap.put(composition, this.getEvaluationTimeStatisticsForComposition(composition)));
return statsMap;
}
}
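/*
* Usage sketch (illustrative addition, not part of the original source): the observer subscribes
* to the event bus of a HASCO instance and can afterwards be queried for per-composition
* statistics:
*
*   HASCOModelStatisticsObserver observer = new HASCOModelStatisticsObserver();
*   hasco.registerListener(observer);
*   hasco.call();
*   observer.getPerformanceStatisticsPerComposition().forEach((uci, stats) -> System.out.println(uci + ": mean score " + stats.getMean()));
*/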
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/twophase/HASCOWithRandomCompletionsConfig.java
|
package ai.libs.hasco.twophase;
import java.util.concurrent.TimeUnit;
import org.api4.java.algorithm.Timeout;
import ai.libs.hasco.core.HASCOConfig;
import ai.libs.jaicore.basic.IOwnerBasedRandomizedAlgorithmConfig;
public interface HASCOWithRandomCompletionsConfig extends HASCOConfig, IOwnerBasedRandomizedAlgorithmConfig {
public static final String K_RANDOM_COMPLETIONS_NUM = "hasco.random_completions.num";
public static final String K_RANDOM_COMPLETIONS_TIMEOUT_NODE = "hasco.random_completions.timeout_node";
public static final String K_RANDOM_COMPLETIONS_TIMEOUT_PATH = "hasco.random_completions.timeout_path";
/**
* @return Number of random completions drawn with RDFS.
*/
@Key(K_RANDOM_COMPLETIONS_NUM)
@DefaultValue("3")
public int numberOfRandomCompletions();
/**
* @return Timeout in ms for a node (this is an upper bound for the sum of the evaluations of all randomly drawn candidates).
*/
@Key(K_RANDOM_COMPLETIONS_TIMEOUT_NODE)
@DefaultValue("15000")
public int timeoutForNodeEvaluation();
/**
* @return A timeout object representing the timeout for the evaluation of a single node.
*/
default Timeout getTimeoutForNodeEvaluation() {
return new Timeout(this.timeoutForNodeEvaluation(), TimeUnit.MILLISECONDS);
}
/**
* @return Timeout in ms for a single evaluation of a solution candidate
*/
@Key(K_RANDOM_COMPLETIONS_TIMEOUT_PATH)
@DefaultValue("15000")
public int timeoutForCandidateEvaluation();
/**
* @return A timeout object representing the timeout for the evaluation of a single candidate / random completion.
*/
default Timeout getTimeoutForCandidateEvaluation() {
return new Timeout(this.timeoutForCandidateEvaluation(), TimeUnit.MILLISECONDS);
}
}
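/*
* Usage sketch (illustrative addition, not part of the original source): like the other
* owner-based configs in this code base, an instance is typically obtained via ConfigFactory; the
* defaults can be overridden through properties:
*
*   HASCOWithRandomCompletionsConfig config = ConfigFactory.create(HASCOWithRandomCompletionsConfig.class);
*   int numCompletions = config.numberOfRandomCompletions(); // 3 unless overridden
*   Timeout nodeTimeout = config.getTimeoutForNodeEvaluation(); // 15000 ms by default
*/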
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/twophase/TwoPhaseCandidateEvaluator.java
|
package ai.libs.hasco.twophase;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import org.api4.java.algorithm.Timeout;
import org.api4.java.algorithm.exceptions.AlgorithmTimeoutedException;
import org.api4.java.common.attributedobjects.IObjectEvaluator;
import org.api4.java.common.control.ILoggingCustomizable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.eventbus.EventBus;
import ai.libs.hasco.core.HASCOSolutionCandidate;
import ai.libs.hasco.core.events.TwoPhaseHASCOSolutionEvaluationEvent;
import ai.libs.jaicore.components.api.IComponentInstance;
import ai.libs.jaicore.components.serialization.ComponentSerialization;
import ai.libs.jaicore.logging.LoggerUtil;
import ai.libs.jaicore.timing.TimedComputation;
public class TwoPhaseCandidateEvaluator implements Runnable, ILoggingCustomizable {
private final EventBus eventBus;
private Logger logger = LoggerFactory.getLogger(TwoPhaseCandidateEvaluator.class);
private final ComponentSerialization serializer = new ComponentSerialization();
/* input variables */
private final IObjectEvaluator<IComponentInstance, Double> evaluator;
private final long selectionPhaseDeadline;
private final HASCOSolutionCandidate<Double> c;
/* derived variables */
private final int estimatedInSelectionSingleIterationEvaluationTime;
private final int estimatedPostProcessingTime;
private final int estimatedTotalEffortInCaseOfSelection;
private final int timeoutForEvaluation;
/* result variables */
private boolean completedSuccessfully = false;
private double selectionScore = Double.NaN;
private long trueEvaluationTime;
/* shared variables */
private final Semaphore sem;
public TwoPhaseCandidateEvaluator(final HASCOSolutionCandidate<Double> c, final long selectionPhaseDeadline, final double timeoutTolerance, final double blowupInSelection, final double blowupInPostProcessing,
final IObjectEvaluator<IComponentInstance, Double> evaluator, final Semaphore sem, final EventBus eventBus) {
super();
this.c = c;
this.selectionPhaseDeadline = selectionPhaseDeadline;
/* Time needed to compute the score of this solution in phase 1 */
int inSearchSolutionEvaluationTime = c.getTimeToEvaluateCandidate();
this.estimatedInSelectionSingleIterationEvaluationTime = (int) Math.round(inSearchSolutionEvaluationTime * blowupInSelection);
this.estimatedPostProcessingTime = (int) Math.round(this.estimatedInSelectionSingleIterationEvaluationTime * blowupInPostProcessing);
this.estimatedTotalEffortInCaseOfSelection = this.estimatedInSelectionSingleIterationEvaluationTime;
this.timeoutForEvaluation = (int) Math.max(2000, this.estimatedInSelectionSingleIterationEvaluationTime * (1 + timeoutTolerance));
this.evaluator = evaluator;
this.sem = sem;
this.eventBus = eventBus;
}
@Override
public void run() {
final long timestampStart = System.currentTimeMillis();
try {
/* We assume linear growth of the evaluation time here to estimate (A) time for
* selection phase, (B) time for post-processing the solution in case it gets selected. */
this.logger.info("Estimating {}ms re-evaluation time and {}ms build time for candidate {} in case of selection (evaluation time during search was {}ms).", this.estimatedInSelectionSingleIterationEvaluationTime,
this.estimatedPostProcessingTime, this.serializer.serialize(this.c.getComponentInstance()), this.c.getTimeToEvaluateCandidate());
/* If we have a global timeout, check whether considering this model is feasible. */
int remainingTime = Integer.MAX_VALUE;
if (this.selectionPhaseDeadline > 0) {
remainingTime = (int) (this.selectionPhaseDeadline - System.currentTimeMillis());
this.logger.info("Identified remaining time in selection phase of {}ms", remainingTime);
}
/* compute the timeout for this evaluation */
int effectiveTimeoutForEvaluation = Math.min(remainingTime - this.estimatedPostProcessingTime, this.timeoutForEvaluation);
if (effectiveTimeoutForEvaluation <= 0) {
this.logger.info("Not evaluating solution {} anymore, because its effective timeout, taking into account an anticipated post-processing time of {}ms, would be non-positive ({}).", this.c.getComponentInstance(),
this.estimatedPostProcessingTime, effectiveTimeoutForEvaluation);
return;
}
this.logger.info("Starting selection performance computation with effective timeout {}ms", effectiveTimeoutForEvaluation);
TimedComputation.compute(() -> {
this.selectionScore = this.evaluator.evaluate(this.c.getComponentInstance());
this.trueEvaluationTime = (System.currentTimeMillis() - timestampStart);
this.completedSuccessfully = true;
this.logger.info("Obtained evaluation score of {} after {}ms for candidate {} (score assigned by HASCO was {}).", this.selectionScore, this.trueEvaluationTime, this.serializer.serialize(this.c.getComponentInstance()),
this.c.getScore());
this.eventBus.post(new TwoPhaseHASCOSolutionEvaluationEvent(null, this.c.getComponentInstance(), this.selectionScore));
return true;
}, new Timeout(effectiveTimeoutForEvaluation, TimeUnit.MILLISECONDS), "Timeout for evaluation of ensemble candidate " + this.serializer.serialize(this.c.getComponentInstance()));
} catch (InterruptedException e) {
assert !Thread.currentThread().isInterrupted() : "The interrupted-flag should not be true when an InterruptedException is thrown!";
this.logger.info("Selection eval of {} got interrupted after {}ms. Defined timeout was: {}ms", this.serializer.serialize(this.c.getComponentInstance()), (System.currentTimeMillis() - timestampStart), this.timeoutForEvaluation);
Thread.currentThread().interrupt(); // no controlled interrupt needed here, because this is only a re-interrupt, and the execution will cease after this anyway
} catch (ExecutionException e) {
this.logger.error("Observed an exeption when trying to evaluate a candidate in the selection phase.\n{}", LoggerUtil.getExceptionInfo(e.getCause()));
} catch (AlgorithmTimeoutedException e) {
this.logger.info("Evaluation of candidate has timed out: {}", this.serializer.serialize(this.c.getComponentInstance()));
} finally {
this.sem.release();
this.logger.debug("Released. Sem state: {}", this.sem.availablePermits());
}
}
public double getSelectionScore() {
if (!this.completedSuccessfully) {
throw new IllegalStateException("The run was not completed succesfully. This exception is to avoid strange behavior, please check whether the run was successful before and only call this method then.");
}
return this.selectionScore;
}
public boolean isCompletedSuccessfully() {
return this.completedSuccessfully;
}
public HASCOSolutionCandidate<Double> getSolution() {
return this.c;
}
public int getEstimatedInSelectionSingleIterationEvaluationTime() {
return this.estimatedInSelectionSingleIterationEvaluationTime;
}
public int getEstimatedPostProcessingTime() {
return this.estimatedPostProcessingTime;
}
public int getEstimatedTotalEffortInCaseOfSelection() {
return this.estimatedTotalEffortInCaseOfSelection;
}
public int getTimeoutForEvaluation() {
return this.timeoutForEvaluation;
}
public long getTrueEvaluationTime() {
return this.trueEvaluationTime;
}
@Override
public String getLoggerName() {
return this.logger.getName();
}
@Override
public void setLoggerName(final String name) {
this.logger = LoggerFactory.getLogger(name);
}
}
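/*
* Usage sketch (illustrative addition, not part of the original source): candidate evaluators are
* Runnables and are typically submitted to an executor service; the shared semaphore allows the
* caller to wait until the evaluation has finished (successfully or not):
*
*   Semaphore sem = new Semaphore(0);
*   TwoPhaseCandidateEvaluator run = new TwoPhaseCandidateEvaluator(candidate, selectionPhaseDeadline, 0.1, blowupInSelection, blowupInPostProcessing, selectionBenchmark, sem, eventBus);
*   executor.submit(run);
*   sem.acquire(); // released in the finally block of run()
*   if (run.isCompletedSuccessfully()) {
*     double selectionScore = run.getSelectionScore();
*   }
*/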
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/twophase/TwoPhaseHASCO.java
|
package ai.libs.hasco.twophase;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Queue;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import org.aeonbits.owner.ConfigFactory;
import org.api4.java.ai.graphsearch.problem.IPathSearchInput;
import org.api4.java.algorithm.events.IAlgorithmEvent;
import org.api4.java.algorithm.exceptions.AlgorithmException;
import org.api4.java.algorithm.exceptions.AlgorithmExecutionCanceledException;
import org.api4.java.algorithm.exceptions.AlgorithmTimeoutedException;
import org.api4.java.common.attributedobjects.IObjectEvaluator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;
import ai.libs.hasco.core.HASCO;
import ai.libs.hasco.core.HASCOSolutionCandidate;
import ai.libs.hasco.core.events.HASCOSolutionEvent;
import ai.libs.hasco.core.events.TwoPhaseHASCOPhaseSwitchEvent;
import ai.libs.hasco.core.events.TwoPhaseHASCOSelectionPhaseSkippedEvent;
import ai.libs.hasco.core.events.TwoPhaseHASCOSolutionEvaluationEvent;
import ai.libs.jaicore.basic.MathExt;
import ai.libs.jaicore.basic.algorithm.AlgorithmFinishedEvent;
import ai.libs.jaicore.basic.algorithm.AlgorithmInitializedEvent;
import ai.libs.jaicore.basic.sets.SetUtil;
import ai.libs.jaicore.components.api.IComponentInstance;
import ai.libs.jaicore.components.optimizingfactory.SoftwareConfigurationAlgorithm;
import ai.libs.jaicore.components.serialization.ComponentSerialization;
import ai.libs.jaicore.concurrent.ANamedTimerTask;
import ai.libs.jaicore.concurrent.GlobalTimer;
import ai.libs.jaicore.logging.ToJSONStringUtil;
public class TwoPhaseHASCO<N, A> extends SoftwareConfigurationAlgorithm<TwoPhaseSoftwareConfigurationProblem, HASCOSolutionCandidate<Double>, Double> {
private static final String SUFFIX_HASCO = ".hasco";
/* logging */
private Logger logger = LoggerFactory.getLogger(TwoPhaseHASCO.class);
private String loggerName;
/* HASCO configuration */
private HASCO<N, A, Double> hasco;
private ANamedTimerTask phase1CancellationTask;
/** The solution selected during selection phase. */
private final Queue<HASCOSolutionCandidate<Double>> phase1ResultQueue = new LinkedBlockingQueue<>();
private final Map<HASCOSolutionCandidate<Double>, TwoPhaseCandidateEvaluator> selectionRuns = new HashMap<>();
private HASCOSolutionCandidate<Double> selectedHASCOSolution;
private final double blowupInSelection;
private final double blowupInPostProcessing;
private final ComponentSerialization serializer = new ComponentSerialization();
/* statistics */
private long timeOfStart = -1;
private int secondsSpentInPhase1;
@Override
public String toString() {
Map<String, Object> fields = new HashMap<>();
fields.put("hasco", this.hasco);
fields.put("phase1ResultQueue", this.phase1ResultQueue);
fields.put("selectedHASCOSolution", this.selectedHASCOSolution);
fields.put("timeOfStart", this.timeOfStart);
fields.put("secondsSpentInPhase1", this.secondsSpentInPhase1);
return ToJSONStringUtil.toJSONString(fields);
}
public TwoPhaseHASCO(final TwoPhaseSoftwareConfigurationProblem problem, final TwoPhaseHASCOConfig config) {
super(config != null ? config : ConfigFactory.create(TwoPhaseHASCOConfig.class), problem);
this.logger.info("Created TwoPhaseHASCO object.");
this.blowupInSelection = this.getConfig().expectedBlowupInSelection();
this.blowupInPostProcessing = this.getConfig().expectedBlowupInPostprocessing();
if (Double.isNaN(this.blowupInSelection)) {
throw new IllegalArgumentException("Blow-Up for selection phase not configured properly.");
}
if (Double.isNaN(this.blowupInPostProcessing)) {
throw new IllegalArgumentException("Blow-Up for post-processing phase not configured properly.");
}
}
public TwoPhaseHASCO(final TwoPhaseSoftwareConfigurationProblem problem, final TwoPhaseHASCOConfig config, final HASCO<N, A, Double> hasco) {
this(problem, config);
this.setHasco(hasco);
}
public void setHasco(final HASCO<N, A, Double> hasco) {
this.hasco = hasco;
this.setHASCOLoggerNameIfPossible();
this.hasco.setConfig(this.getConfig());
this.hasco.registerListener(new Object() {
@Subscribe
public void receiveHASCOEvent(final IAlgorithmEvent event) {
/* forward the HASCO events and register solutions to update best seen solutions and fill up the queue */
if (!(event instanceof AlgorithmInitializedEvent || event instanceof AlgorithmFinishedEvent)) {
TwoPhaseHASCO.this.post(event);
}
if (event instanceof HASCOSolutionEvent) {
@SuppressWarnings("unchecked")
HASCOSolutionCandidate<Double> solution = ((HASCOSolutionEvent<Double>) event).getSolutionCandidate();
TwoPhaseHASCO.this.updateBestSeenSolution(solution);
TwoPhaseHASCO.this.logger.info("Received new solution {} with score {} and evaluation time {}ms", solution.getComponentInstance(), solution.getScore(), solution.getTimeToEvaluateCandidate());
TwoPhaseHASCO.this.phase1ResultQueue.add(solution);
}
}
}); // this is to register solutions during runtime
}
private void awaitTerminationOfHASCO() throws InterruptedException {
/* wait for HASCO to complete the cancel */
AtomicBoolean cancelCompleted = this.hasco.getCancelCompleted();
synchronized (cancelCompleted) {
this.logger.info("Waiting for HACSO to complete cancel.");
while (!cancelCompleted.get()) {
cancelCompleted.wait();
}
}
this.logger.info("HASCO completed cancel. Now throwing a cancelation exception.");
}
@Override
public IAlgorithmEvent nextWithException() throws InterruptedException, AlgorithmTimeoutedException, AlgorithmException, AlgorithmExecutionCanceledException {
this.logger.info("Stepping 2phase HASCO. Current state: {}", this.getState());
switch (this.getState()) {
case CREATED:
if (this.hasco == null) {
throw new IllegalStateException("Cannot start algorithm before HASCO has been set. Please set HASCO either in constructor or via the setter.");
}
this.timeOfStart = System.currentTimeMillis();
AlgorithmInitializedEvent event = this.activate();
this.logger.info(
"Starting 2-Phase HASCO with the following setup:\n\tCPUs:{},\n\tTimeout: {}s\n\tTimeout per node evaluation: {}ms\n\tTimeout per candidate: {}ms\n\tNumber of Random Completions: {}\n\tExpected blow-ups are {} (selection) and {} (post-processing).\nThe search factory is: {}",
this.getNumCPUs(), this.getTimeout().seconds(), this.getConfig().timeoutForNodeEvaluation(), this.getConfig().timeoutForCandidateEvaluation(), this.getConfig().numberOfRandomCompletions(), this.blowupInSelection,
this.blowupInPostProcessing, this.hasco.getSearchFactory());
this.setHASCOLoggerNameIfPossible();
this.logger.info("Initialized HASCO with start time {}.", this.timeOfStart);
return event;
/* active is only one step in this model; this could be refined */
case ACTIVE:
/* phase 1: gather solutions */
if (this.hasco.getTimeout().milliseconds() >= 0) {
GlobalTimer timer = GlobalTimer.getInstance();
this.phase1CancellationTask = new ANamedTimerTask() {
@Override
public void exec() {
try {
/* check whether the algorithm has been shutdown, then also cancel this task */
if (TwoPhaseHASCO.this.isShutdownInitialized()) {
this.cancel();
return;
}
/* check termination of phase 1 */
int timeElapsed = (int) (System.currentTimeMillis() - TwoPhaseHASCO.this.timeOfStart);
int timeRemaining = (int) TwoPhaseHASCO.this.hasco.getTimeout().milliseconds() - timeElapsed;
if (timeRemaining < 2000 || TwoPhaseHASCO.this.shouldSearchTerminate(timeRemaining)) {
TwoPhaseHASCO.this.logger.info("Canceling HASCO (first phase). {}ms remaining.", timeRemaining);
TwoPhaseHASCO.this.hasco.cancel();
TwoPhaseHASCO.this.logger.info("HASCO canceled successfully after {}ms", (System.currentTimeMillis() - TwoPhaseHASCO.this.timeOfStart) - timeElapsed);
this.cancel(); // cancels the TIMER, not 2PHASE-HASCO!!
}
} catch (Exception e) {
TwoPhaseHASCO.this.logger.error("Observed {} while checking termination of phase 1. Stack trace is: {}", e.getClass().getName(),
Arrays.stream(e.getStackTrace()).map(se -> "\n\t" + se.toString()).collect(Collectors.joining()));
}
}
};
this.phase1CancellationTask.setDescriptor("TwoPhaseHASCO task to check termination of phase 1");
timer.scheduleAtFixedRate(this.phase1CancellationTask, 1000, 1000);
}
this.logger.info("Entering phase 1. Calling HASCO with timeout {}.", this.hasco.getTimeout());
try {
this.hasco.call();
} catch (AlgorithmExecutionCanceledException e) {
this.logger.info("HASCO has terminated due to a cancel. My own cancel state is: {}", this.isCanceled());
if (this.isCanceled()) {
this.awaitTerminationOfHASCO();
throw new AlgorithmExecutionCanceledException(e.getDelay());
}
} catch (AlgorithmTimeoutedException e) {
this.logger.warn("HASCO has timeouted. In fact, time to deadline is {}ms", this.getTimeout().milliseconds() - (System.currentTimeMillis() - this.timeOfStart));
} finally {
if (this.phase1CancellationTask != null) {
this.phase1CancellationTask.cancel();
}
}
this.secondsSpentInPhase1 = (int) Math.round((System.currentTimeMillis() - this.timeOfStart) / 1000.0);
/* if there is no candidate, and the remaining time is very small, throw an AlgorithmTimeoutedException */
this.logger.info("HASCO has finished. {} solutions were found.", this.phase1ResultQueue.size());
if (this.phase1ResultQueue.isEmpty() && this.getRemainingTimeToDeadline().seconds() < 10) {
this.logger.info("No solution found within phase 1. Throwing an AlgorithmTimeoutedException (This is conventional behavior for when an algorithm has not identified its solution when the timeout bound is hit.)");
this.awaitTerminationOfHASCO();
this.terminate(); // this sends the AlgorithmFinishedEvent
throw new AlgorithmTimeoutedException(this.getRemainingTimeToDeadline().milliseconds() * -1);
}
/* phase 2: enter phase and set respective logs/events */
IObjectEvaluator<?, Double> selectionBenchmark = this.getInput().getSelectionBenchmark();
if (selectionBenchmark != null) {
if (this.logger.isInfoEnabled()) {
this.logger.info("Entering phase 2. Remaining time: {}ms", this.getRemainingTimeToDeadline().milliseconds());
this.logger.debug("Solutions seen so far had the following (internal) errors and evaluation times (one per line): {}", this.phase1ResultQueue.stream()
.map(e -> "\n\t" + MathExt.round(e.getScore(), 4) + " in " + e.getTimeToEvaluateCandidate() + "ms (" + this.serializer.serialize(e.getComponentInstance()) + ")").collect(Collectors.joining()));
}
this.post(new TwoPhaseHASCOPhaseSwitchEvent(this));
// Robustness check whether precondition of phase 2 is actually fulfilled.
if (this.phase1ResultQueue.isEmpty()) {
this.logger.error("Not a single solution found in the first phase. Thus, exit with exception.");
throw new AlgorithmException("Not a single solution candidate could be found in the first phase. Please check your search space configuration and search phase benchmark carefully.");
}
this.checkAndConductTermination();
/* phase 2: conduct it (select model) */
this.selectedHASCOSolution = this.selectModel();
} else {
this.logger.info("Selection phase is disabled. Returning best result of phase 1.");
final Optional<HASCOSolutionCandidate<Double>> bestSolutionOptional = this.phase1ResultQueue.stream().min((s1, s2) -> s1.getScore().compareTo(s2.getScore()));
if (!bestSolutionOptional.isPresent()) {
throw new IllegalStateException("Cannot select a model since phase 1 has not returned any result.");
}
this.selectedHASCOSolution = bestSolutionOptional.get();
}
this.setBestSeenSolution(this.selectedHASCOSolution);
assert this.getBestSeenSolution().equals(this.selectedHASCOSolution);
this.logger.info("TwoPhaseHASCO has finished. Possibly awaiting HASCO termination. State of HASCO cancellation: {}", this.hasco.getCancelCompleted().get());
if (this.hasco.isCanceled()) {
this.awaitTerminationOfHASCO();
this.logger.info("TwoPhaseHASCO has finished and HASCO canceallation is {}/{} (canceled/cancel completed)", this.hasco.isCanceled(), this.hasco.getCancelCompleted().get());
}
return this.terminate();
default:
throw new IllegalStateException("Cannot do anything in state " + this.getState());
}
}
protected boolean shouldSearchTerminate(final long timeRemaining) {
Collection<HASCOSolutionCandidate<Double>> currentSelection = this.getSelectionForPhase2();
int estimateForRemainingRuntime = this.getExpectedTotalRemainingRuntimeForAGivenPool(currentSelection, true);
boolean terminatePhase1 = estimateForRemainingRuntime + 5000 > timeRemaining;
int currentMemoryConsumption = (int) ((Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1024 / 1024);
this.logger.info("{}ms of the available time remaining in total, and we estimate a remaining runtime of {}ms. Terminate phase 1: {}. Current memory consumption: {}MB", timeRemaining, estimateForRemainingRuntime, terminatePhase1,
currentMemoryConsumption);
return terminatePhase1;
}
public synchronized List<HASCOSolutionCandidate<Double>> getSelectionForPhase2() {
return this.getSelectionForPhase2(Integer.MAX_VALUE);
}
private static final double MAX_MARGIN_FROM_BEST = 0.03;
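/**
* Determines the pool of candidates for the selection phase: all candidates whose internal score is
* within MAX_MARGIN_FROM_BEST of the best score are considered; from these, the best k/2 and up to
* k/2 further random candidates are drawn (k being the configured number of considered solutions),
* and the pool is then truncated so that its expected evaluation fits into the remaining time.
*
* @param remainingTime the remaining time budget in milliseconds
* @return the candidates to be evaluated in the selection phase
*/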
private synchronized List<HASCOSolutionCandidate<Double>> getSelectionForPhase2(final int remainingTime) {
if (this.getNumberOfConsideredSolutions() < 1) {
throw new UnsupportedOperationException("Cannot determine candidates for phase 2 if their number is set to a value less than 1. Here, it has been set to " + this.getNumberOfConsideredSolutions());
}
/* some initial checks for cases where we do not really have to do anything */
if (remainingTime < 0) {
throw new IllegalArgumentException("Cannot do anything in negative time (" + remainingTime + "ms)");
}
HASCOSolutionCandidate<Double> internallyOptimalSolution = this.getBestSeenSolution();
if (internallyOptimalSolution == null) {
return new ArrayList<>();
}
/* compute k pipeline candidates (the k/2 best, and k/2 random ones that do not deviate too much from the best one) */
double optimalInternalScore = internallyOptimalSolution.getScore();
Collection<HASCOSolutionCandidate<Double>> potentialCandidates = new ArrayList<>(this.phase1ResultQueue).stream().filter(solution -> solution.getScore() <= optimalInternalScore + MAX_MARGIN_FROM_BEST).sorted((s1,s2) -> Double.compare(s1.getScore(), s2.getScore())).collect(Collectors.toList());
int numberOfRelevantSolution = Math.min(this.getNumberOfConsideredSolutions(), potentialCandidates.size());
int bestK = (int) Math.min(numberOfRelevantSolution, Math.ceil((double) this.getNumberOfConsideredSolutions() / 2));
int randomK = numberOfRelevantSolution - bestK;
this.logger.debug("Computing {} best and {} random solutions for a max runtime of {}. Number of candidates that are at most {} worse than optimum {} is: {}/{}", bestK, randomK, remainingTime, MAX_MARGIN_FROM_BEST,
optimalInternalScore, potentialCandidates.size(), this.phase1ResultQueue.size());
List<HASCOSolutionCandidate<Double>> selectionCandidates = potentialCandidates.stream().limit(bestK).collect(Collectors.toList());
if (randomK > 0) {
List<HASCOSolutionCandidate<Double>> remainingCandidates = new ArrayList<>(SetUtil.difference(potentialCandidates, selectionCandidates));
Collections.shuffle(remainingCandidates, new Random(this.getConfig().randomSeed()));
selectionCandidates.addAll(remainingCandidates.stream().limit(randomK).collect(Collectors.toList()));
}
if (this.logger.isTraceEnabled()) {
this.logger.trace("Determined the following candidates for selection phase (in this order): {}", selectionCandidates.stream().map(c -> "\n\t" + c.getScore() + ": " + c.getComponentInstance()).collect(Collectors.joining()));
}
/* if the candidates can be evaluated in the remaining time, return all of them */
int budget = this.getExpectedTotalRemainingRuntimeForAGivenPool(selectionCandidates, true);
if (budget < remainingTime) {
return selectionCandidates;
}
/* otherwise return as much as can be expectedly done in the time */
List<HASCOSolutionCandidate<Double>> actuallySelectedSolutions = new ArrayList<>();
int expectedRuntime;
for (HASCOSolutionCandidate<Double> pl : selectionCandidates) {
actuallySelectedSolutions.add(pl);
expectedRuntime = this.getExpectedTotalRemainingRuntimeForAGivenPool(actuallySelectedSolutions, true);
if (expectedRuntime > remainingTime && actuallySelectedSolutions.size() > 1) {
this.logger.info("Not considering solution {} for phase 2, because the expected runtime of the whole thing would be {}/{}", pl, expectedRuntime, remainingTime);
actuallySelectedSolutions.remove(pl);
}
}
return actuallySelectedSolutions;
}
private int getInSearchEvaluationTimeOfSolutionSet(final Collection<HASCOSolutionCandidate<Double>> solutions) {
return solutions.stream().map(HASCOSolutionCandidate::getTimeToEvaluateCandidate).mapToInt(x -> x).sum();
}
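/**
* Estimates the total remaining runtime for the given candidate pool as the expected runtime of
* phase 2 plus the expected post-processing time; the latter is taken from the currently best
* candidate if it is assumed to be selected, and from the most expensive pool member otherwise.
*
* @param solutions the candidate pool
* @param assumeCurrentlyBestCandidateToBeSelected whether the currently best candidate is assumed to be the one that is post-processed
* @return the expected total remaining runtime in milliseconds
*/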
public int getExpectedTotalRemainingRuntimeForAGivenPool(final Collection<HASCOSolutionCandidate<Double>> solutions, final boolean assumeCurrentlyBestCandidateToBeSelected) {
int timeForPhase2 = this.getExpectedRuntimeForPhase2ForAGivenPool(solutions);
int timeForPostprocessing = 0;
if (assumeCurrentlyBestCandidateToBeSelected && this.getBestSeenSolution() != null) {
timeForPostprocessing = this.getPostprocessingTimeOfCurrentlyBest();
} else {
timeForPostprocessing = this.getMaximumPostprocessingTimeOfAnyPoolMember(solutions);
}
return timeForPhase2 + timeForPostprocessing;
}
public int getPostprocessingTimeOfCurrentlyBest() {
return (int) Math.round(this.getBestSeenSolution().getTimeToEvaluateCandidate() * this.blowupInSelection * this.blowupInPostProcessing);
}
public int getMaximumPostprocessingTimeOfAnyPoolMember(final Collection<HASCOSolutionCandidate<Double>> solutions) {
int max = 0;
for (HASCOSolutionCandidate<Double> candidate : solutions) {
int expectedPostProcessingTime = (int) Math.ceil(candidate.getTimeToEvaluateCandidate() * this.blowupInSelection * this.blowupInPostProcessing);
max = Math.max(max, expectedPostProcessingTime);
}
return max;
}
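/**
* Estimates the net runtime of phase 2 for the given pool: the in-search evaluation times of all
* pool members are summed, multiplied by the selection blow-up factor, and divided by the number of
* usable CPUs. As a purely hypothetical example, two candidates that took 3000ms and 5000ms to
* evaluate in phase 1, a blow-up of 2.0 and 2 CPUs yield (3000 + 5000) * 2.0 / 2 = 8000ms.
*
* @param solutions the candidate pool
* @return the expected runtime of phase 2 in milliseconds
*/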
public int getExpectedRuntimeForPhase2ForAGivenPool(final Collection<HASCOSolutionCandidate<Double>> solutions) {
int inSearchMCEvalTime = this.getInSearchEvaluationTimeOfSolutionSet(solutions);
int estimateEvaluationTimeForSelectionPhase = (int) (inSearchMCEvalTime * this.blowupInSelection);
int usableCPUs = Math.min(this.getConfig().cpus(), solutions.size());
int runtime = estimateEvaluationTimeForSelectionPhase / Math.max(1, usableCPUs);
this.logger.debug("Expected runtime is {} = {} * {} / {} for a candidate pool of size {}", runtime, inSearchMCEvalTime, this.blowupInSelection, usableCPUs, solutions.size());
return runtime;
}
public HASCOSolutionCandidate<Double> getBestSolutionOfPhase1() {
final Optional<HASCOSolutionCandidate<Double>> bestSolutionOptional = this.phase1ResultQueue.stream().min((s1, s2) -> s1.getScore().compareTo(s2.getScore()));
if (!bestSolutionOptional.isPresent()) {
throw new IllegalStateException("Cannot select a model since phase 1 has not returned any result.");
}
return bestSolutionOptional.get();
}
public List<HASCOSolutionCandidate<Double>> getEnsembleToSelectFromInPhase2() {
if (this.getTimeout().seconds() <= 0) {
return this.getSelectionForPhase2().stream().sorted((c1, c2) -> Double.compare(c1.getScore(), c2.getScore())).collect(Collectors.toList());
}
int remainingTime = (int) (this.getTimeout().milliseconds() - (System.currentTimeMillis() - this.timeOfStart));
/* check remaining time, otherwise just return the solution with best F-Value. */
if (remainingTime < 0) {
HASCOSolutionCandidate<Double> bestSolution = this.getBestSolutionOfPhase1();
double scoreOfBestSolution = bestSolution.getScore();
this.logger.info("Timelimit is already exhausted, just returning a greedy solution that had internal error {}.", scoreOfBestSolution);
return Arrays.asList(bestSolution);
}
/* Get a queue of solutions to perform selection evaluation for. */
List<HASCOSolutionCandidate<Double>> ensembleToSelectFrom = this.getSelectionForPhase2(remainingTime); // should be ordered by scores already (at least the first k)
int expectedTimeForPhase2 = this.getExpectedRuntimeForPhase2ForAGivenPool(ensembleToSelectFrom);
int expectedPostprocessingTime = this.getPostprocessingTimeOfCurrentlyBest();
int expectedMaximumRemainingRuntime = expectedTimeForPhase2 + expectedPostprocessingTime;
remainingTime = (int) (this.getTimeout().milliseconds() - (System.currentTimeMillis() - this.timeOfStart));
if (expectedMaximumRemainingRuntime > remainingTime) {
this.logger.warn("Only {}ms remaining. We probably cannot make it in time.", remainingTime);
}
if (this.logger.isInfoEnabled()) {
this.logger.info(
"We expect phase 2 to consume {}ms for {} candidates, and post-processing is assumed to take at most {}ms, which is a total remaining runtime of {}ms. {}ms are permitted by timeout. The following candidates are considered (one per line with the internal error of phase 1): {}",
expectedTimeForPhase2, ensembleToSelectFrom.size(), expectedPostprocessingTime, expectedMaximumRemainingRuntime, remainingTime, ensembleToSelectFrom.stream()
.map(e -> "\n\t" + MathExt.round(e.getScore(), 4) + " in " + e.getTimeToEvaluateCandidate() + "ms (" + this.serializer.serialize(e.getComponentInstance()) + ")").collect(Collectors.joining()));
}
return ensembleToSelectFrom.stream().sorted((c1, c2) -> Double.compare(c1.getScore(), c2.getScore())).collect(Collectors.toList());
}
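/**
* Conducts phase 2: determines the pool of candidates to select from, evaluates each of them with
* the selection benchmark in a dedicated thread pool, and returns the candidate with the best
* selection score. If the pool is empty, null is returned; if it contains only one candidate, that
* candidate is returned without further evaluation; if no evaluation completes successfully, the
* best solution of phase 1 is returned instead.
*
* @return the selected solution candidate, or null if no candidate is available at all
* @throws InterruptedException if the thread is interrupted while waiting for the evaluations
*/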
protected HASCOSolutionCandidate<Double> selectModel() throws InterruptedException {
/* determine the models from which we want to select */
this.logger.info("Starting with phase 2: Selection of final model among the {} solutions that were identified.", this.phase1ResultQueue.size());
long startOfPhase2 = System.currentTimeMillis();
List<HASCOSolutionCandidate<Double>> ensembleToSelectFrom = this.getEnsembleToSelectFromInPhase2();
if (ensembleToSelectFrom.isEmpty()) {
this.logger.warn("No solution contained in ensemble.");
this.post(new TwoPhaseHASCOSelectionPhaseSkippedEvent(this));
return null;
} else if (ensembleToSelectFrom.size() == 1) {
this.logger.info("No selection to make since there is only one candidate to select from.");
this.post(new TwoPhaseHASCOSelectionPhaseSkippedEvent(this));
return ensembleToSelectFrom.get(0);
}
/* setup the thread pool for evaluation */
AtomicInteger evaluatorCounter = new AtomicInteger(0);
int threadsForPool = this.getConfig().threads() < 1 ? this.getConfig().cpus() : this.getConfig().threads() - 1; // subtract one thread for the one that is currently active
this.logger.info("Create a thread pool for phase 2 of size {}.", threadsForPool);
ExecutorService pool = Executors.newFixedThreadPool(threadsForPool, r -> {
Thread t = new Thread(r);
t.setName("final-evaluator-" + evaluatorCounter.incrementAndGet());
return t;
});
/* evaluate each candidate */
final Semaphore sem = new Semaphore(0);
long timestampOfDeadline = this.timeOfStart + this.getTimeout().milliseconds() - 2000;
final IObjectEvaluator<IComponentInstance, Double> evaluator = this.getInput().getSelectionBenchmark();
final double timeoutTolerance = TwoPhaseHASCO.this.getConfig().selectionPhaseTimeoutTolerance();
final String loggerNameForWorkers = this.getLoggerName() + ".worker";
final EventBus selectionEventBus = new EventBus();
selectionEventBus.register(new Object() {
@Subscribe
public void receiveSolution(final TwoPhaseHASCOSolutionEvaluationEvent e) {
TwoPhaseHASCO.this.post(e);
}
});
for (HASCOSolutionCandidate<Double> c : ensembleToSelectFrom) {
TwoPhaseCandidateEvaluator run = new TwoPhaseCandidateEvaluator(c, timestampOfDeadline, timeoutTolerance, this.blowupInSelection, this.blowupInPostProcessing, evaluator, sem, selectionEventBus);
run.setLoggerName(loggerNameForWorkers);
this.selectionRuns.put(c, run);
pool.submit(run);
}
/* now wait for results */
int n = ensembleToSelectFrom.size();
this.logger.info("Waiting for termination of {} computations running on {} threads.", n, this.getConfig().cpus());
sem.acquire(n);
long endOfPhase2 = System.currentTimeMillis();
this.logger.info("Finished phase 2 within {}ms net. Total runtime was {}ms. Evaluated solutions {}/{}", endOfPhase2 - startOfPhase2, endOfPhase2 - this.timeOfStart, this.selectionRuns.values().stream().filter(TwoPhaseCandidateEvaluator::isCompletedSuccessfully).collect(Collectors.toList()).size(), n);
this.logger.debug("Shutting down thread pool");
pool.shutdownNow();
if (!pool.awaitTermination(5, TimeUnit.SECONDS)) {
this.logger.warn("Thread pool did not terminate within the grace period!");
}
/* set chosen model */
Optional<TwoPhaseCandidateEvaluator> bestEvaluatedSolution = this.getCandidateThatWouldCurrentlyBeSelectedWithinPhase2(this.selectionRuns);
if (bestEvaluatedSolution.isPresent()) {
TwoPhaseCandidateEvaluator selectedModel = bestEvaluatedSolution.get();
HASCOSolutionCandidate<Double> solution = selectedModel.getSolution();
this.logger.info("Selected a configuration: {}. Its internal score was {}. Selection score was {}", this.serializer.serialize(solution.getComponentInstance()), solution.getScore(), selectedModel.getSelectionScore());
return solution;
} else {
this.logger.warn("Could not select any real solution in selection phase, just returning the best we have seen in HASCO.");
return this.getBestSolutionOfPhase1();
}
}
private synchronized Optional<TwoPhaseCandidateEvaluator> getCandidateThatWouldCurrentlyBeSelectedWithinPhase2(final Map<HASCOSolutionCandidate<Double>, TwoPhaseCandidateEvaluator> stats) {
return stats.entrySet().stream().map(Entry::getValue).filter(TwoPhaseCandidateEvaluator::isCompletedSuccessfully).min((e1, e2) -> Double.compare(e1.getSelectionScore(), e2.getSelectionScore()));
}
public HASCO<N, A, Double> getHasco() {
return this.hasco;
}
public Queue<HASCOSolutionCandidate<Double>> getPhase1ResultQueue() {
return this.phase1ResultQueue;
}
public int getSecondsSpentInPhase1() {
return this.secondsSpentInPhase1;
}
public Map<HASCOSolutionCandidate<Double>, TwoPhaseCandidateEvaluator> getSelectionPhaseEvaluationRunners() {
return this.selectionRuns;
}
@Override
public void shutdown() {
this.logger.info("Received shutdown signal. Cancelling phase 1 timer and invoking shutdown on parent.");
if (this.phase1CancellationTask != null) {
this.phase1CancellationTask.cancel();
}
super.shutdown();
}
@Override
public void cancel() {
this.logger.info("Received cancel signal.");
super.cancel();
this.logger.debug("Cancelling HASCO. My own cancel flag is {}", this.isCanceled());
if (this.hasco != null) {
this.hasco.cancel();
}
assert this.isCanceled() : "Cancel-flag is not true at the end of the cancel procedure!";
}
/**
* @return The solution candidate selected by TwoPhase HASCO
*/
public HASCOSolutionCandidate<Double> getSelectedSolutionCandidate() {
return this.selectedHASCOSolution;
}
@Override
public TwoPhaseHASCOConfig getConfig() {
return (TwoPhaseHASCOConfig) super.getConfig();
}
/**
* @return The number of considered solutions in the selection phase.
*/
public int getNumberOfConsideredSolutions() {
return this.getConfig().selectionNumConsideredSolutions();
}
/**
* @param numberOfConsideredSolutions
* The number of considered solutions in the selection phase.
*/
public void setNumberOfConsideredSolutions(final int numberOfConsideredSolutions) {
this.getConfig().setProperty(TwoPhaseHASCOConfig.K_SELECTION_NUM_CONSIDERED_SOLUTIONS, numberOfConsideredSolutions + "");
}
public IPathSearchInput<N, A> getGraphSearchInput() {
if (this.hasco == null) {
throw new IllegalStateException("Cannot retrieve GraphGenerator prior to algorithm initialization.");
}
if (this.hasco.getSearch() == null) {
throw new IllegalStateException("Cannot retrieve GraphGenerator prior to algorithm initialization.");
}
return this.hasco.getSearch().getInput();
}
public TwoPhaseHASCOReport getReport() {
return new TwoPhaseHASCOReport(this.phase1ResultQueue.size(), this.secondsSpentInPhase1, this.selectedHASCOSolution);
}
@Override
public String getLoggerName() {
return this.loggerName;
}
@Override
public void setLoggerName(final String name) {
this.loggerName = name;
this.logger.info("Switching logger from {} to {}", this.logger.getName(), name);
this.logger = LoggerFactory.getLogger(name);
this.logger.info("Activated logger {} with name {}", name, this.logger.getName());
this.serializer.setLoggerName(name + ".serializer");
this.setHASCOLoggerNameIfPossible();
super.setLoggerName(this.loggerName + "._orgraphsearch");
}
private void setHASCOLoggerNameIfPossible() {
if (this.getLoggerName() == null) {
return;
}
if (this.hasco == null) {
this.logger.info("HASCO object is null, so not setting a logger.");
return;
}
if (this.hasco.getLoggerName() != null && this.hasco.getLoggerName().equals(this.loggerName + SUFFIX_HASCO)) {
this.logger.info("HASCO logger has already been customized correctly, not customizing again.");
return;
}
this.logger.info("Setting logger of {} to {}{}", this.hasco.getId(), this.getLoggerName(), SUFFIX_HASCO);
this.hasco.setLoggerName(this.getLoggerName() + SUFFIX_HASCO);
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/twophase/TwoPhaseHASCOConfig.java
|
package ai.libs.hasco.twophase;
import org.aeonbits.owner.Config.DefaultValue;
import org.aeonbits.owner.Config.Key;
public interface TwoPhaseHASCOConfig extends HASCOWithRandomCompletionsConfig {
public static final String K_RANDOM_SEED = "hasco.seed";
public static final String K_BLOWUP_SELECTION = "hasco.blowup.selection";
public static final String K_BLOWUP_POSTPROCESS = "hasco.blowup.postprocess";
public static final String K_SELECTION_EVALUATION_TIMEOUT_TOLERANCE = "hasco.selection.timeouttolerance";
public static final String K_SELECTION_NUM_CONSIDERED_SOLUTIONS = "hasco.selection.num_considered_solutions";
/**
* @return The seed for the pseudo randomness generator.
*/
@Key(K_RANDOM_SEED)
@DefaultValue("0")
public int randomSeed();
/**
* @return The number of solutions that are considered during selection phase.
*/
@Key(K_SELECTION_NUM_CONSIDERED_SOLUTIONS)
@DefaultValue("100")
public int selectionNumConsideredSolutions();
/**
* @return Expected multiplication in time for each solution candidate that will be required for evaluation
*/
@Key(K_BLOWUP_SELECTION)
@DefaultValue("NaN")
public double expectedBlowupInSelection();
/**
* @return Expected multiplication in time for each solution candidate that will be required for a postprocessing that should be considered when computing the timeout
*/
@Key(K_BLOWUP_POSTPROCESS)
@DefaultValue("NaN")
public double expectedBlowupInPostprocessing();
/**
* @return The factor by which the evaluation in the selection phase may exceed the time expected on the basis of the estimate given by the blow-up
*/
@Key(K_SELECTION_EVALUATION_TIMEOUT_TOLERANCE)
@DefaultValue("0.2")
public double selectionPhaseTimeoutTolerance();
}
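/*
* Illustrative usage sketch (not part of the original sources): since the properties above are
* declared via owner-style @Key/@DefaultValue annotations, an instance of this configuration can
* typically be obtained through the owner ConfigFactory, assuming the inherited configuration
* interface ultimately extends org.aeonbits.owner.Config:
*
* TwoPhaseHASCOConfig config = org.aeonbits.owner.ConfigFactory.create(TwoPhaseHASCOConfig.class);
* int numConsidered = config.selectionNumConsideredSolutions(); // 100 unless overridden
* double blowup = config.expectedBlowupInSelection(); // NaN unless hasco.blowup.selection is set
*/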
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/twophase/TwoPhaseHASCOReport.java
|
package ai.libs.hasco.twophase;
import ai.libs.hasco.core.HASCOSolutionCandidate;
public class TwoPhaseHASCOReport {
private final int numSolutionsInPhase1;
private final int durationPhase1;
private final HASCOSolutionCandidate<Double> returnedSolution;
public TwoPhaseHASCOReport(int numSolutionsInPhase1, int durationPhase1, HASCOSolutionCandidate<Double> returnedSolution) {
super();
this.numSolutionsInPhase1 = numSolutionsInPhase1;
this.durationPhase1 = durationPhase1;
this.returnedSolution = returnedSolution;
}
public int getNumSolutionsInPhase1() {
return numSolutionsInPhase1;
}
public int getDurationPhase1() {
return durationPhase1;
}
public HASCOSolutionCandidate<Double> getReturnedSolution() {
return returnedSolution;
}
}
|
0
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco
|
java-sources/ai/libs/hasco-core/0.2.7/ai/libs/hasco/twophase/TwoPhaseSoftwareConfigurationProblem.java
|
package ai.libs.hasco.twophase;
import java.io.File;
import java.io.IOException;
import org.api4.java.common.attributedobjects.IObjectEvaluator;
import ai.libs.jaicore.components.api.IComponentInstance;
import ai.libs.jaicore.components.api.INumericParameterRefinementConfigurationMap;
import ai.libs.jaicore.components.model.RefinementConfiguredSoftwareConfigurationProblem;
import ai.libs.jaicore.components.model.SoftwareConfigurationProblem;
public class TwoPhaseSoftwareConfigurationProblem extends RefinementConfiguredSoftwareConfigurationProblem<Double> {
private final IObjectEvaluator<IComponentInstance, Double> selectionBenchmark;
public TwoPhaseSoftwareConfigurationProblem(final File configurationFile, final String requiredInterface, final IObjectEvaluator<IComponentInstance, Double> compositionEvaluator,
final IObjectEvaluator<IComponentInstance, Double> selectionBenchmark) throws IOException {
super(configurationFile, requiredInterface, compositionEvaluator);
this.selectionBenchmark = selectionBenchmark;
}
public TwoPhaseSoftwareConfigurationProblem(final SoftwareConfigurationProblem<Double> coreProblem, final INumericParameterRefinementConfigurationMap paramRefinementConfig,
final IObjectEvaluator<IComponentInstance, Double> selectionBenchmark) {
super(coreProblem, paramRefinementConfig);
this.selectionBenchmark = selectionBenchmark;
}
public IObjectEvaluator<IComponentInstance, Double> getSelectionBenchmark() {
return this.selectionBenchmark;
}
@Override
public int hashCode() {
final int prime = 31;
int result = super.hashCode();
result = prime * result + ((this.selectionBenchmark == null) ? 0 : this.selectionBenchmark.hashCode());
return result;
}
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (!super.equals(obj)) {
return false;
}
if (this.getClass() != obj.getClass()) {
return false;
}
TwoPhaseSoftwareConfigurationProblem other = (TwoPhaseSoftwareConfigurationProblem) obj;
if (this.selectionBenchmark == null) {
if (other.selectionBenchmark != null) {
return false;
}
} else if (!this.selectionBenchmark.equals(other.selectionBenchmark)) {
return false;
}
return true;
}
}
|
0
|
java-sources/ai/libs/hasco-fanova/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco-fanova/0.2.1/ai/libs/hasco/knowledgebase/ExtractionOfImportantParametersFailedException.java
|
package ai.libs.hasco.knowledgebase;
public class ExtractionOfImportantParametersFailedException extends Exception {
/**
*
*/
private static final long serialVersionUID = -8995563919172816333L;
public ExtractionOfImportantParametersFailedException(final String msg) {
super(msg);
}
public ExtractionOfImportantParametersFailedException(final String msg, final Throwable cause) {
super(msg, cause);
}
}
|
0
|
java-sources/ai/libs/hasco-fanova/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco-fanova/0.2.1/ai/libs/hasco/knowledgebase/FANOVAParameterImportanceEstimator.java
|
package ai.libs.hasco.knowledgebase;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Sets;
import ai.libs.hasco.core.Util;
import ai.libs.hasco.model.Component;
import ai.libs.hasco.model.ComponentInstance;
import ai.libs.jaicore.ml.weka.rangequery.learner.intervaltree.ExtendedRandomForest;
import ai.libs.jaicore.ml.weka.rangequery.learner.intervaltree.featurespace.FeatureDomain;
import ai.libs.jaicore.ml.weka.rangequery.learner.intervaltree.featurespace.FeatureSpace;
import weka.core.Instances;
/**
* Parameter importance estimator using fANOVA.
*
* @author jmhansel
*
*/
public class FANOVAParameterImportanceEstimator implements IParameterImportanceEstimator {
private static final Logger LOGGER = LoggerFactory.getLogger(FANOVAParameterImportanceEstimator.class);
private PerformanceKnowledgeBase performanceKnowledgeBase;
private String benchmarkName;
private Map<String, HashMap<Set<Integer>, Double>> importanceDictionary;
private Map<String, Set<String>> importantParameterMap;
private int minNumSamples;
private double importanceThreshold;
private int sizeOfLargestSubsetToConsider;
private Set<String> prunedParameters;
public FANOVAParameterImportanceEstimator(final PerformanceKnowledgeBase performanceKnowledgeBase, final String benchmarkName, final int minNumSamples, final double importanceThreshold) {
this.performanceKnowledgeBase = performanceKnowledgeBase;
this.benchmarkName = benchmarkName;
this.importanceDictionary = new HashMap<>();
this.importantParameterMap = new HashMap<>();
this.minNumSamples = minNumSamples;
this.importanceThreshold = importanceThreshold;
// For now only consider subsets of size <= 2
this.sizeOfLargestSubsetToConsider = 2;
this.prunedParameters = new HashSet<>();
}
public FANOVAParameterImportanceEstimator(final String benchmarkName, final int minNumSamples, final double importanceThreshold) {
this(null, benchmarkName, minNumSamples, importanceThreshold);
}
/**
* Extracts important parameters for subsets of size up to
* {@code sizeOfLargestSubsetToConsider}. Importance values are put into the
* importance dictionary; to recompute already cached values, the flag
* {@code recompute} can be set. A usage sketch is given below this method.
*
* @param composition the composition whose parameters are to be analyzed
* @param recompute whether importance values shall be recomputed even if they are already cached
* @return the names of the parameters whose importance reaches the configured threshold
* @throws ExtractionOfImportantParametersFailedException if the underlying forest model cannot be built
*/
@Override
public Set<String> extractImportantParameters(final ComponentInstance composition, final boolean recompute) throws ExtractionOfImportantParametersFailedException {
String pipelineIdentifier = Util.getComponentNamesOfComposition(composition);
if (this.importantParameterMap.containsKey(pipelineIdentifier)) {
return this.importantParameterMap.get(pipelineIdentifier);
}
Instances data = this.performanceKnowledgeBase.getPerformanceSamples(this.benchmarkName, composition);
FeatureSpace space = new FeatureSpace(data);
Set<String> importantParameters = new HashSet<>();
if (space.getDimensionality() < 2) {
for (FeatureDomain domain : space.getFeatureDomains()) {
importantParameters.add(domain.getName());
}
return importantParameters;
}
// Set of all parameters to compute difference later
for (FeatureDomain domain : space.getFeatureDomains()) {
this.prunedParameters.add(domain.getName());
}
ExtendedRandomForest forest = new ExtendedRandomForest();
// forest.setMinNumSamples
try {
forest.buildClassifier(data);
forest.prepareForest(data);
} catch (Exception e) {
throw new ExtractionOfImportantParametersFailedException("Could not build model", e);
}
if (!this.importanceDictionary.containsKey(pipelineIdentifier)) {
this.importanceDictionary.put(pipelineIdentifier, new HashMap<Set<Integer>, Double>());
}
Set<Integer> parameterIndices = new HashSet<>();
for (int i = 0; i < data.numAttributes() - 1; i++) {
parameterIndices.add(i);
}
// for now we only consider subsets of size k <= 2
for (int k = 1; k <= this.sizeOfLargestSubsetToConsider; k++) {
Set<Set<Integer>> currentSubsets = Sets.combinations(parameterIndices, k);
for (Set<Integer> subset : currentSubsets) {
double currentImportance;
// if recomputation is desired or the dictionary has no importance value stored, compute it
if (recompute) {
currentImportance = forest.computeMarginalVarianceContributionForFeatureSubset(subset);
this.importanceDictionary.get(pipelineIdentifier).put(subset, currentImportance);
} else if (this.importanceDictionary.get(pipelineIdentifier).containsKey(subset)) {
LOGGER.debug("Taking value from dictionary");
currentImportance = this.importanceDictionary.get(pipelineIdentifier).get(subset);
}
// if no value is available in the dictionary, compute it
else {
currentImportance = forest.computeMarginalVarianceContributionForFeatureSubset(subset);
this.importanceDictionary.get(pipelineIdentifier).put(subset, currentImportance);
if (Double.isNaN(currentImportance)) {
currentImportance = 1.0;
LOGGER.debug("importance value is NaN, so it will be set to 1");
}
}
LOGGER.debug("Importance value for parameter subset {}: {}", subset, currentImportance);
LOGGER.debug("Importance value {} >= {}" + ": ", currentImportance, this.importanceThreshold, (currentImportance >= this.importanceThreshold));
if (currentImportance >= this.importanceThreshold) {
for (int i : subset) {
importantParameters.add(forest.getFeatureSpace().getFeatureDomain(i).getName());
}
}
}
}
this.importantParameterMap.put(pipelineIdentifier, importantParameters);
this.prunedParameters.removeAll(importantParameters);
return importantParameters;
}
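/*
* Illustrative usage sketch (hypothetical variable names, not taken from the original sources):
*
* IParameterImportanceEstimator estimator =
* new FANOVAParameterImportanceEstimator(performanceKnowledgeBase, "myBenchmark", 30, 0.05);
* if (estimator.readyToEstimateImportance(composition)) {
* Set<String> important = estimator.extractImportantParameters(composition, false);
* }
*
* Note that extractImportantParameters declares the checked
* ExtractionOfImportantParametersFailedException, which has to be handled by the caller.
*/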
/**
* Computes importance values for individual components.
*/
@Override
public Map<String, Double> computeImportanceForSingleComponent(final Component component) {
Instances data = this.performanceKnowledgeBase.getPerformanceSamplesForIndividualComponent(this.benchmarkName, component);
if (data == null) {
return null;
}
ExtendedRandomForest forest = new ExtendedRandomForest();
HashMap<String, Double> result = new HashMap<>();
try {
forest.buildClassifier(data);
for (int i = 0; i < data.numAttributes() - 1; i++) {
HashSet<Integer> set = new HashSet<>();
set.add(i);
double importance = forest.computeMarginalVarianceContributionForFeatureSubset(set);
result.put(data.attribute(i).name(), importance);
}
} catch (Exception e) {
LOGGER.error("Could not build model and compute marginal variance contribution.", e);
}
return result;
}
@Override
public boolean readyToEstimateImportance(final ComponentInstance composition) {
return this.performanceKnowledgeBase.kDistinctAttributeValuesAvailable(this.benchmarkName, composition, this.minNumSamples);
}
/**
* @return the performanceKnowledgeBase
*/
@Override
public PerformanceKnowledgeBase getPerformanceKnowledgeBase() {
return this.performanceKnowledgeBase;
}
/**
* @param performanceKnowledgeBase the performanceKnowledgeBase to set
*/
@Override
public void setPerformanceKnowledgeBase(final PerformanceKnowledgeBase performanceKnowledgeBase) {
this.performanceKnowledgeBase = performanceKnowledgeBase;
}
@Override
public int getNumberPrunedParameters() {
return this.prunedParameters.size();
}
@Override
public Set<String> getPrunedParameters() {
return this.prunedParameters;
}
}
|
0
|
java-sources/ai/libs/hasco-fanova/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco-fanova/0.2.1/ai/libs/hasco/knowledgebase/FANOVAWarmstartComparator.java
|
package ai.libs.hasco.knowledgebase;
import java.util.Comparator;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ai.libs.hasco.model.Component;
import ai.libs.hasco.model.Parameter;
/**
* Comparator which compares parameters according to their importance using the
* FANOVAImportanceEstimator.
*
* @author jmhansel
*
*/
public class FANOVAWarmstartComparator implements Comparator<Parameter> {
private static final Logger LOGGER = LoggerFactory.getLogger(FANOVAWarmstartComparator.class);
private Map<String, Double> importanceValues;
private IParameterImportanceEstimator importanceEstimator;
public FANOVAWarmstartComparator(IParameterImportanceEstimator importanceEstimator, Component component) {
this.importanceEstimator = importanceEstimator;
this.importanceValues = this.importanceEstimator.computeImportanceForSingleComponent(component);
LOGGER.debug("importance values: {}", importanceValues);
}
/**
* Compares parameters according to their importance values
*/
@Override
public int compare(Parameter o1, Parameter o2) {
if (importanceValues == null) {
return 0;
}
LOGGER.debug("{} value: {}", o1, importanceValues.get(o1.toString()));
// We want the parameters to be sorted in descending order according to their importance
if (importanceValues.get(o1.getName()) < importanceValues.get(o2.getName())) {
return 1;
}
if (importanceValues.get(o1.getName()) > importanceValues.get(o2.getName())) {
return -1;
}
return 0;
}
}
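/*
* Illustrative usage sketch (hypothetical variables, not taken from the original sources): the
* comparator can be used to order the parameters of a component by descending importance, e.g.
* for warm-starting parameter refinement:
*
* List<Parameter> params = new ArrayList<>(component.getParameters().getTotalOrder());
* params.sort(new FANOVAWarmstartComparator(importanceEstimator, component));
*/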
|
0
|
java-sources/ai/libs/hasco-fanova/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco-fanova/0.2.1/ai/libs/hasco/knowledgebase/IParameterImportanceEstimator.java
|
package ai.libs.hasco.knowledgebase;
import java.util.Map;
import java.util.Set;
import ai.libs.hasco.model.Component;
import ai.libs.hasco.model.ComponentInstance;
public interface IParameterImportanceEstimator {
/**
* Extracts the parameters of a composition that reach the configured threshold
* w.r.t. importance.
*
* @param composition the composition whose parameters are to be analyzed
* @param recompute whether importance values shall be recomputed even if they are already cached
* @return the names of the important parameters
* @throws ExtractionOfImportantParametersFailedException if the importance values cannot be computed
*/
public Set<String> extractImportantParameters(ComponentInstance composition, boolean recompute) throws ExtractionOfImportantParametersFailedException;
/**
* Computes importance values for an individual component.
*
* @param component the component whose parameters are to be analyzed
* @return a map from parameter (attribute) name to its estimated importance
*/
public Map<String, Double> computeImportanceForSingleComponent(Component component);
/**
* Checks whether the estimator is ready to estimate parameter importance for
* the composition
*
* @param composition
* @return true if the estimator is ready, false otherwise
*/
public boolean readyToEstimateImportance(ComponentInstance composition);
/**
* Set the performance knowledge base used for parameter importance estimation
* @param performanceKB
*/
public void setPerformanceKnowledgeBase(PerformanceKnowledgeBase performanceKB);
/**
* Get the performance knowledge base used for parameter importance estimation
*/
public PerformanceKnowledgeBase getPerformanceKnowledgeBase();
/**
* Returns the number of parameters that have been pruned
* @return number of pruned parameters
*/
public int getNumberPrunedParameters();
public Set<String> getPrunedParameters();
}
|
0
|
java-sources/ai/libs/hasco-fanova/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco-fanova/0.2.1/ai/libs/hasco/knowledgebase/IntermediateResultHandler.java
|
package ai.libs.hasco.knowledgebase;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.eventbus.Subscribe;
import ai.libs.hasco.events.HASCOSolutionEvaluationEvent;
import ai.libs.jaicore.db.sql.SQLAdapter;
/**
* Class to handle HASCOSolutionEvaluationEvents and insert the results into the
* database.
*
* @author jmhansel
*
*/
public class IntermediateResultHandler {
public static final String TABLE_NAME = "performance_samples";
private SQLAdapter adapter;
private String benchmarkName;
private String testEvalTechnique;
private String testSplitTechnique;
private String valEvalTechnique;
private String valSplitTechnique;
private int testSeed;
private int valSeed;
public IntermediateResultHandler(final SQLAdapter adapter, final String benchmarkName, final String testEvalTechnique, final String testSplitTechnique, final int testSeed, final String valEvalTechnique, final String valSplitTechnique,
final int valSeed) {
this.adapter = adapter;
this.benchmarkName = benchmarkName;
this.testEvalTechnique = testEvalTechnique;
this.testSplitTechnique = testSplitTechnique;
this.testSeed = testSeed;
this.valEvalTechnique = valEvalTechnique;
this.valSplitTechnique = valSplitTechnique;
this.valSeed = valSeed;
}
@Subscribe
public void receiveSolutionEvaluationEvent(final HASCOSolutionEvaluationEvent<?, ?> solution) throws JsonProcessingException, SQLException {
Map<String, String> map = new HashMap<>();
ObjectMapper mapper = new ObjectMapper();
String composition = mapper.writeValueAsString(solution.getComposition());
map.put("composition", composition);
//// outer split
map.put("test_evaluation_technique", this.testEvalTechnique);
map.put("test_split_technique", this.testSplitTechnique);
map.put("test_seed", Integer.toString(this.testSeed));
//// inner split
map.put("val_evaluation_technique", this.valEvalTechnique);
map.put("val_split_technique", this.valSplitTechnique);
map.put("val_seed", Integer.toString(this.valSeed));
map.put("error_rate", solution.getScore().toString());
// training and test times are not recorded here; they would have to be measured separately (e.g., with a stopwatch)
map.put("dataset", this.benchmarkName);
if (this.adapter != null) {
this.adapter.insert(TABLE_NAME, map);
}
}
}
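/*
* Illustrative registration sketch (hypothetical variables, not taken from the original sources):
* the handler receives HASCOSolutionEvaluationEvents via Guava's @Subscribe mechanism, so it has
* to be registered with the event bus (or algorithm) that posts these events, for instance:
*
* IntermediateResultHandler handler = new IntermediateResultHandler(adapter, "myDataset",
* "single", "70/30", 0, "single", "70/30", 0);
* eventBus.register(handler); // e.g. a com.google.common.eventbus.EventBus
*/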
|
0
|
java-sources/ai/libs/hasco-fanova/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco-fanova/0.2.1/ai/libs/hasco/knowledgebase/PerformanceKnowledgeBase.java
|
package ai.libs.hasco.knowledgebase;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.tuple.Pair;
import org.api4.java.datastructure.kvstore.IKVStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
import ai.libs.hasco.core.Util;
import ai.libs.hasco.model.CategoricalParameterDomain;
import ai.libs.hasco.model.Component;
import ai.libs.hasco.model.ComponentInstance;
import ai.libs.hasco.model.Dependency;
import ai.libs.hasco.model.IParameterDomain;
import ai.libs.hasco.model.NumericParameterDomain;
import ai.libs.hasco.model.Parameter;
import ai.libs.hasco.serialization.ParameterDeserializer;
import ai.libs.hasco.serialization.ParameterDomainDeserializer;
import ai.libs.jaicore.basic.StringUtil;
import ai.libs.jaicore.db.sql.SQLAdapter;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.ProtectedProperties;
/**
* Knowledge base that manages observed performance behavior
*
* @author jmhansel
*
*/
public class PerformanceKnowledgeBase {
private static final Logger LOGGER = LoggerFactory.getLogger(PerformanceKnowledgeBase.class);
private static final String LABEL_PERFORMANCE_SAMPLES = "performance_samples";
private SQLAdapter sqlAdapter;
private Map<String, HashMap<ComponentInstance, Double>> performanceSamples;
/** Maps a benchmark name to the performance samples (parameter configuration and score) observed per pipeline identifier. */
private Map<String, HashMap<String, List<Pair<ParameterConfiguration, Double>>>> performanceSamplesByIdentifier;
private Map<String, HashMap<String, Instances>> performanceInstancesByIdentifier;
private Map<String, HashMap<String, Instances>> performanceInstancesIndividualComponents;
/**
* Inner helper class for managing parameter configurations easily.
*
* @author jmhansel
*
*/
private class ParameterConfiguration {
private final List<Pair<Parameter, String>> values;
public ParameterConfiguration(final ComponentInstance composition) {
ArrayList<Pair<Parameter, String>> temp = new ArrayList<>();
List<ComponentInstance> componentInstances = Util.getComponentInstancesOfComposition(composition);
for (ComponentInstance compInst : componentInstances) {
List<Parameter> parameters = compInst.getComponent().getParameters().getTotalOrder();
for (Parameter parameter : parameters) {
String value;
if (compInst.getParametersThatHaveBeenSetExplicitly().contains(parameter)) {
value = compInst.getParameterValues().get(parameter.getName());
} else {
value = parameter.getDefaultValue().toString();
}
temp.add(Pair.of(parameter, value));
}
}
// Make the list immutable to avoid problems with hashCode
this.values = Collections.unmodifiableList(temp);
}
@Override
public int hashCode() {
return new HashCodeBuilder().append(this.values).toHashCode();
}
@Override
public boolean equals(final Object obj) {
if (!(obj instanceof ParameterConfiguration)) {
return false;
}
ParameterConfiguration other = (ParameterConfiguration) obj;
return new EqualsBuilder().append(this.values, other.values).isEquals();
}
public List<Pair<Parameter, String>> getValues() {
return this.values;
}
}
public PerformanceKnowledgeBase(final SQLAdapter sqlAdapter) {
super();
this.sqlAdapter = sqlAdapter;
this.performanceInstancesByIdentifier = new HashMap<>();
this.performanceInstancesIndividualComponents = new HashMap<>();
}
public PerformanceKnowledgeBase() {
super();
this.performanceInstancesByIdentifier = new HashMap<>();
this.performanceInstancesIndividualComponents = new HashMap<>();
}
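/*
* Illustrative usage sketch (hypothetical variables, not taken from the original sources): samples
* are added per benchmark and component instance and can later be retrieved as WEKA Instances:
*
* PerformanceKnowledgeBase kb = new PerformanceKnowledgeBase();
* kb.addPerformanceSample("myBenchmark", componentInstance, 0.12, false); // false: do not write to the DB
* if (kb.kCompletelyDistinctSamplesAvailable("myBenchmark", componentInstance, 10)) {
* Instances samples = kb.getPerformanceSamples("myBenchmark", componentInstance);
* }
*/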
private Attribute getAttribute(final ComponentInstance ci, final Parameter parameter) {
IParameterDomain domain = parameter.getDefaultDomain();
if (domain instanceof CategoricalParameterDomain) {
CategoricalParameterDomain catDomain = (CategoricalParameterDomain) domain;
return new Attribute(ci.getComponent().getName() + "::" + parameter.getName(), Arrays.asList(catDomain.getValues()));
} else if (domain instanceof NumericParameterDomain) {
NumericParameterDomain numDomain = (NumericParameterDomain) domain;
String range = "[" + numDomain.getMin() + "," + numDomain.getMax() + "]";
Properties prop = new Properties();
prop.setProperty("range", range);
ProtectedProperties metaInfo = new ProtectedProperties(prop);
return new Attribute(ci.getComponent().getName() + "::" + parameter.getName(), metaInfo);
} else {
return null;
}
}
public Instances getInstancesForCI(final ComponentInstance ci) {
// Create Instances pipeline for this pipeline type
Instances instances = null;
// Add parameter domains as attributes
List<Parameter> parameters = ci.getComponent().getParameters().getTotalOrder();
ArrayList<Attribute> attributes = new ArrayList<>(parameters.size());
for (Parameter parameter : parameters) {
attributes.add(this.getAttribute(ci, parameter));
}
Attribute scoreAttr = new Attribute("performance_score");
attributes.add(scoreAttr);
instances = new Instances(LABEL_PERFORMANCE_SAMPLES, attributes, 16);
instances.setClass(scoreAttr);
return instances;
}
public Instances getInstancesForCIList(final List<ComponentInstance> componentInstances) {
// Create Instances pipeline for this pipeline type
Instances instances = null;
ArrayList<Attribute> allAttributes = new ArrayList<>();
for (ComponentInstance ci : componentInstances) {
List<Parameter> parameters = ci.getComponent().getParameters().getTotalOrder();
ArrayList<Attribute> attributes = new ArrayList<>(parameters.size());
for (Parameter parameter : parameters) {
attributes.add(this.getAttribute(ci, parameter));
}
allAttributes.addAll(attributes);
}
Attribute scoreAttr = new Attribute("performance_score");
allAttributes.add(scoreAttr);
instances = new Instances(LABEL_PERFORMANCE_SAMPLES, allAttributes, 16);
instances.setClass(scoreAttr);
return instances;
}
public void addPerformanceSample(final String benchmarkName, final ComponentInstance componentInstance, final double score, final boolean addToDB) {
String identifier = Util.getComponentNamesOfComposition(componentInstance);
if (this.performanceInstancesByIdentifier.get(benchmarkName) == null) {
HashMap<String, Instances> newMap = new HashMap<>();
HashMap<String, Instances> newMap2 = new HashMap<>();
this.performanceInstancesByIdentifier.put(benchmarkName, newMap);
this.performanceInstancesIndividualComponents.put(benchmarkName, newMap2);
}
if (!this.performanceInstancesByIdentifier.get(benchmarkName).containsKey(identifier)) {
// Add parameter domains as attributes
List<ComponentInstance> componentInstances = Util.getComponentInstancesOfComposition(componentInstance);
this.performanceInstancesByIdentifier.get(benchmarkName).put(identifier, this.getInstancesForCIList(componentInstances));
}
List<ComponentInstance> componentInstances = Util.getComponentInstancesOfComposition(componentInstance);
for (ComponentInstance ci : componentInstances) {
if (!this.performanceInstancesIndividualComponents.get(benchmarkName).containsKey(ci.getComponent().getName())) {
this.performanceInstancesIndividualComponents.get(benchmarkName).put(ci.getComponent().getName(), this.getInstancesForCI(ci));
}
}
// Add Instance for performance samples to corresponding Instances
Instances instances = this.performanceInstancesByIdentifier.get(benchmarkName).get(identifier);
DenseInstance instance = new DenseInstance(instances.numAttributes());
ParameterConfiguration config = new ParameterConfiguration(componentInstance);
List<Pair<Parameter, String>> values = config.getValues();
for (int i = 0; i < instances.numAttributes() - 1; i++) {
Attribute attr = instances.attribute(i);
Parameter param = values.get(i).getLeft();
if (values.get(i).getRight() != null) {
if (param.isCategorical()) {
String value = values.get(i).getRight();
boolean attrContainsValue = false;
Enumeration<Object> possibleValues = attr.enumerateValues();
while (possibleValues.hasMoreElements() && !attrContainsValue) {
Object o = possibleValues.nextElement();
if (o.equals(value)) {
attrContainsValue = true;
}
}
if (attrContainsValue) {
instance.setValue(attr, value);
} else {
LOGGER.error("The value you're trying to insert is not in the attributes range!");
}
} else if (param.isNumeric()) {
double finalValue = Double.parseDouble(values.get(i).getRight());
instance.setValue(attr, finalValue);
}
}
}
Attribute scoreAttr = instances.classAttribute();
instance.setValue(scoreAttr, score);
this.performanceInstancesByIdentifier.get(benchmarkName).get(identifier).add(instance);
// Add Instance for individual component
for (ComponentInstance ci : componentInstances) {
this.performanceInstancesIndividualComponents.get(benchmarkName).get(ci.getComponent().getName()).add(this.getInstanceForIndividualCI(benchmarkName, ci, score));
}
if (addToDB) {
this.addPerformanceSampleToDB(benchmarkName, componentInstance, score);
}
}
public Instance getInstanceForIndividualCI(final String benchmarkName, final ComponentInstance ci, final double score) {
Instances instancesInd = this.performanceInstancesIndividualComponents.get(benchmarkName).get(ci.getComponent().getName());
DenseInstance instanceInd = new DenseInstance(instancesInd.numAttributes());
for (int i = 0; i < instancesInd.numAttributes() - 1; i++) {
Attribute attr = instancesInd.attribute(i);
String attrFQN = attr.name();
String attrName = attrFQN.substring(attrFQN.indexOf("::") + 2);
Parameter param = ci.getComponent().getParameterWithName(attrName);
String value;
if (ci.getParametersThatHaveBeenSetExplicitly().contains(param)) {
value = ci.getParameterValues().get(param.getName());
} else {
value = param.getDefaultValue().toString();
}
if (value != null) {
if (param.isCategorical()) {
boolean attrContainsValue = false;
Enumeration<Object> possibleValues = attr.enumerateValues();
while (possibleValues.hasMoreElements() && !attrContainsValue) {
Object o = possibleValues.nextElement();
if (o.equals(value)) {
attrContainsValue = true;
}
}
if (attrContainsValue) {
instanceInd.setValue(attr, value);
}
} else if (param.isNumeric()) {
double finalValue = Double.parseDouble(value);
instanceInd.setValue(attr, finalValue);
}
}
}
Attribute scoreAttrInd = instancesInd.classAttribute();
instanceInd.setValue(scoreAttrInd, score);
return instanceInd;
}
public Map<String, HashMap<String, Instances>> getPerformanceInstancesIndividualComponents() {
return this.performanceInstancesIndividualComponents;
}
public Map<String, HashMap<String, List<Pair<ParameterConfiguration, Double>>>> getPerformanceSamplesByIdentifier() {
return this.performanceSamplesByIdentifier;
}
public String getStringOfMaps() {
return this.performanceSamples.toString();
}
public void initializeDBTables() {
/* initialize tables if not existent */
try {
List<IKVStore> rs = this.sqlAdapter.getResultsOfQuery("SHOW TABLES");
boolean havePerformanceTable = false;
for (IKVStore store : rs) {
String tableName = store.getAsString(StringUtil.firstElementWithPrefix(store.keySet(), "Tables_in"));
if (tableName.equals("performance_samples_J48")) {
havePerformanceTable = true;
}
}
if (!havePerformanceTable) {
LOGGER.debug("Creating table for performance samples");
this.sqlAdapter.update("CREATE TABLE `performance_samples_J48` (\r\n" + " `sample_id` int(10) NOT NULL AUTO_INCREMENT,\r\n" + " `dataset` varchar(200) COLLATE utf8_bin DEFAULT NULL,\r\n" + " `composition` json NOT NULL,\r\n"
+ " `error_rate` double NOT NULL,\r\n" + " `test_evaluation_technique` varchar(20) ,\r\n" + " `test_split_technique` varchar(20) ,\r\n" + " `val_evaluation_technique` varchar(20) ,\r\n"
+ " `val_split_technique` varchar(20) ,\r\n" + " `test_seed` int(11) ,\r\n" + " `val_seed` int(11) ,\r\n" + " PRIMARY KEY (`sample_id`)\r\n" + ") ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COLLATE=utf8_bin",
new ArrayList<>());
}
} catch (SQLException e) {
LOGGER.error("Could not create table for performance samples", e);
}
}
public void addPerformanceSampleToDB(final String benchmarkName, final ComponentInstance componentInstance, final double score) {
try {
Map<String, String> map = new HashMap<>();
map.put("benchmark", benchmarkName);
ObjectMapper mapper = new ObjectMapper();
String composition = mapper.writeValueAsString(componentInstance);
map.put("composition", composition);
map.put("score", "" + score);
this.sqlAdapter.insert(LABEL_PERFORMANCE_SAMPLES, map);
} catch (Exception e) {
LOGGER.error("An error occurred while storing the performance sample in the database.", e);
}
}
/**
* Returns the number of samples for the given benchmark name and pipeline
* identifier.
*
* @param benchmarkName
* @param identifier
* @return
*/
public int getNumSamples(final String benchmarkName, final String identifier) {
if (!this.performanceInstancesByIdentifier.containsKey(benchmarkName)) {
return 0;
}
if (!this.performanceInstancesByIdentifier.get(benchmarkName).containsKey(identifier)) {
return 0;
}
return this.performanceInstancesByIdentifier.get(benchmarkName).get(identifier).numInstances();
}
/**
* Returns the number of significant samples for the given benchmark name and
* pipeline identifier. A sample counts as significant here if it differs from an
* earlier sample in all of its attribute values.
*
* @param benchmarkName the name of the benchmark
* @param identifier the pipeline identifier
* @return the number of significant samples
*/
public int getNumSignificantSamples(final String benchmarkName, final String identifier) {
if (!this.performanceInstancesByIdentifier.containsKey(benchmarkName)) {
return 0;
}
if (!this.performanceInstancesByIdentifier.get(benchmarkName).containsKey(identifier)) {
return 0;
}
Instances instances = this.performanceInstancesByIdentifier.get(benchmarkName).get(identifier);
int numDistinctValues = 1;
for (int i = 0; i < instances.numInstances(); i++) {
for (int j = 0; j < i; j++) {
boolean allValuesDistinct = true;
for (int k = 0; k < instances.numAttributes(); k++) {
if (instances.get(i).value(k) == instances.get(j).value(k)) {
allValuesDistinct = false;
}
}
if (allValuesDistinct) {
numDistinctValues++;
}
}
}
return numDistinctValues;
}
public void loadPerformanceSamplesFromDB() {
if (this.sqlAdapter == null) {
LOGGER.error("SQLAdapter is not set.");
throw new IllegalArgumentException("No SQLAdapter set.");
}
try {
List<IKVStore> rslist = this.sqlAdapter.getResultsOfQuery("SELECT dataset, composition, error_rate FROM performance_samples_J48");
ObjectMapper mapper = new ObjectMapper();
for (IKVStore rs : rslist) {
String benchmarkName = rs.getAsString("dataset");
String ciString = rs.getAsString("composition");
if (!benchmarkName.equals("test")) {
SimpleModule parameterModule = new SimpleModule();
ParameterDeserializer des = new ParameterDeserializer();
parameterModule.addDeserializer(Parameter.class, des);
SimpleModule parameterDomainModule = new SimpleModule();
ParameterDomainDeserializer parameterDomainDes = new ParameterDomainDeserializer();
parameterDomainModule.addDeserializer(Dependency.class, parameterDomainDes);
ComponentInstance composition = mapper.readValue(ciString, ComponentInstance.class);
double score = rs.getAsDouble("error_rate");
this.addPerformanceSample(benchmarkName, composition, score, false);
}
}
} catch (SQLException e) {
LOGGER.error("An error occurred while trying to access the database", e);
} catch (IOException e) {
LOGGER.error("Could not parse json representation.", e);
}
}
/**
* Checks whether at least {@code minNum} samples are available such that each relevant
* attribute takes sufficiently many distinct values across them.
*
* @param benchmarkName the name of the benchmark
* @param composition the composition whose samples are considered
* @param minNum the minimum number of samples respectively distinct values required
* @return true if enough distinct attribute values are available, false otherwise
*/
public boolean kDistinctAttributeValuesAvailable(final String benchmarkName, final ComponentInstance composition, final int minNum) {
if (!this.performanceInstancesByIdentifier.containsKey(benchmarkName)) {
return false;
}
String identifier = Util.getComponentNamesOfComposition(composition);
if (!this.performanceInstancesByIdentifier.get(benchmarkName).containsKey(identifier)) {
return false;
}
Instances instances = this.performanceInstancesByIdentifier.get(benchmarkName).get(identifier);
if (instances.numInstances() < minNum) {
return false;
}
for (int i = 0; i < instances.numAttributes() - 1; i++) {
// for categorical attributes with fewer than minNum possible values, require that every possible value has been observed
if (instances.attribute(i).numValues() > 0 && instances.attribute(i).numValues() < minNum) {
if (instances.numDistinctValues(i) < instances.attribute(i).numValues()) {
return false;
}
} else if (instances.attribute(i).getUpperNumericBound() <= instances.attribute(i).getLowerNumericBound()) {
// nothing to do here
} else if (instances.numDistinctValues(i) < minNum) {
return false;
}
}
return true;
}
/**
* Checks whether at least k sample are available, that are pairwise distinct in
* each of their attribute values.
*
* @param benchmarkName
* @param composition
* @param minNum
* strictly positive minimum number of samples
* @return
*/
public boolean kCompletelyDistinctSamplesAvailable(final String benchmarkName, final ComponentInstance composition, final int minNum) {
if (!this.performanceInstancesByIdentifier.containsKey(benchmarkName)) {
return false;
}
String identifier = Util.getComponentNamesOfComposition(composition);
if (!this.performanceInstancesByIdentifier.get(benchmarkName).containsKey(identifier)) {
return false;
}
Instances instances = this.performanceInstancesByIdentifier.get(benchmarkName).get(identifier);
if (instances.numInstances() == 0) {
return false;
}
if (minNum == 1 && instances.numInstances() > 0) {
return true;
}
int count = 0;
for (int i = 0; i < instances.numInstances(); i++) {
boolean distinctFromAll = true;
for (int j = 0; j < i; j++) {
Instance instance1 = instances.get(i);
Instance instance2 = instances.get(j);
for (int k = 0; k < instances.numAttributes() - 1; k++) {
if ((instances.attribute(k).isNominal() || instances.attribute(k).isString()) && (instances.attribute(k).numValues() < minNum)
|| instances.attribute(k).getUpperNumericBound() <= instances.attribute(k).getLowerNumericBound()) {
continue;
}
if (instance1.value(k) == instance2.value(k)) {
distinctFromAll = false;
}
}
}
if (distinctFromAll) {
count++;
}
if (count >= minNum) {
return true;
}
}
return false;
}
public Instances getPerformanceSamples(final String benchmarkName, final ComponentInstance composition) {
String identifier = Util.getComponentNamesOfComposition(composition);
return this.performanceInstancesByIdentifier.get(benchmarkName).get(identifier);
}
public Instances createInstancesForPerformanceSamples(final String benchmarkName, final ComponentInstance composition) {
return this.getPerformanceSamples(benchmarkName, composition);
}
public Instances getPerformanceSamplesForIndividualComponent(final String benchmarkName, final Component component) {
if (this.performanceInstancesIndividualComponents.get(benchmarkName) != null && this.performanceInstancesIndividualComponents.get(benchmarkName).get(component.getName()) != null) {
return this.performanceInstancesIndividualComponents.get(benchmarkName).get(component.getName());
}
return null;
}
public int getNumSamplesForComponent(final String benchmarkName, final Component component) {
if (this.performanceInstancesIndividualComponents.get(benchmarkName) != null && this.performanceInstancesIndividualComponents.get(benchmarkName).get(component.getName()) != null) {
return this.performanceInstancesIndividualComponents.get(benchmarkName).get(component.getName()).size();
}
return 0;
}
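/**
* Replaces the performance samples stored for the given benchmark and composition with the provided
* instances, creating the benchmark entry first if it does not exist yet.
*/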
public void setPerformanceSamples(final Instances instances, final ComponentInstance composition, final String benchmarkName) {
String identifier = Util.getComponentNamesOfComposition(composition);
if (!this.performanceInstancesByIdentifier.containsKey(benchmarkName)) {
HashMap<String, Instances> newMap = new HashMap<>();
HashMap<String, Instances> newMap2 = new HashMap<>();
this.performanceInstancesByIdentifier.put(benchmarkName, newMap);
this.performanceInstancesIndividualComponents.put(benchmarkName, newMap2);
}
this.performanceInstancesByIdentifier.get(benchmarkName).put(identifier, instances);
}
}
|
0
|
java-sources/ai/libs/hasco-fanova/0.2.1/ai/libs/hasco
|
java-sources/ai/libs/hasco-fanova/0.2.1/ai/libs/hasco/knowledgebase/PerformanceSampleListener.java
|
package ai.libs.hasco.knowledgebase;
import com.google.common.eventbus.Subscribe;
import ai.libs.hasco.core.HASCOSolutionCandidate;
import ai.libs.hasco.events.HASCOSolutionEvent;
import ai.libs.hasco.model.ComponentInstance;
/**
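* Listener that subscribes to {@link HASCOSolutionEvent}s and stores the score of every reported
* solution candidate as a performance sample in a {@link PerformanceKnowledgeBase}.
* <p>
* Minimal usage sketch (how the knowledge base and the HASCO instance are obtained, as well as the
* benchmark name "accuracy", are assumptions for illustration):
*
* <pre>
* PerformanceKnowledgeBase kb = ...; // obtain or create a knowledge base
* hasco.registerListener(new PerformanceSampleListener(kb, "accuracy"));
* </pre>
*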
*
* @author jmhansel
*
*/
public class PerformanceSampleListener {
private PerformanceKnowledgeBase performanceKnowledgeBase;
private String benchmarkName;
public PerformanceSampleListener(final PerformanceKnowledgeBase performanceKnowledgeBase, final String benchmarkName) {
this.performanceKnowledgeBase = performanceKnowledgeBase;
this.benchmarkName = benchmarkName;
}
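/**
* Called by the event bus whenever HASCO reports a new solution; extracts the component instance and
* its score and adds them as a performance sample under the configured benchmark name.
*/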
@Subscribe
public void handleEvent(final HASCOSolutionEvent<Double> event) {
if (event.getSolutionCandidate() instanceof HASCOSolutionCandidate) {
HASCOSolutionCandidate<?> solutionCandidate = event.getSolutionCandidate();
ComponentInstance ci = solutionCandidate.getComponentInstance();
double score = (Double) solutionCandidate.getScore();
this.performanceKnowledgeBase.addPerformanceSample(this.benchmarkName, ci, score, false);
}
}
public PerformanceKnowledgeBase getPerformanceKnowledgeBase() {
return this.performanceKnowledgeBase;
}
public void setPerformanceKnowledgeBase(final PerformanceKnowledgeBase performanceKnowledgeBase) {
this.performanceKnowledgeBase = performanceKnowledgeBase;
}
public String getBenchmarkName() {
return this.benchmarkName;
}
public void setBenchmarkName(final String benchmarkName) {
this.benchmarkName = benchmarkName;
}
}
|
0
|
java-sources/ai/libs/jaicore-algorithminspector/0.2.7/ai/libs/jaicore
|
java-sources/ai/libs/jaicore-algorithminspector/0.2.7/ai/libs/jaicore/graphvisualizer/IColorMap.java
|
package ai.libs.jaicore.graphvisualizer;
import java.awt.Color;
public interface IColorMap {
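/**
* Maps the value val, which is expected to lie in the interval [min, max], to a color.
*/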
public Color get(double min, double max, double val);
}
|
0
|
java-sources/ai/libs/jaicore-algorithminspector/0.2.7/ai/libs/jaicore
|
java-sources/ai/libs/jaicore-algorithminspector/0.2.7/ai/libs/jaicore/graphvisualizer/IntegerAxisFormatter.java
|
package ai.libs.jaicore.graphvisualizer;
import ai.libs.jaicore.basic.MathExt;
import javafx.util.StringConverter;
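/**
* Axis label formatter that prints only values that are integers (up to a precision of 10^-8) and
* suppresses all other tick labels.
*/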
public class IntegerAxisFormatter extends StringConverter<Number> {
@Override
public String toString(final Number object) {
Double val = MathExt.round(object.doubleValue(), 8);
if (val.intValue() == val) { // treat numbers within 10^-8 of an integer as integers
String str = String.valueOf(val);
str = str.substring(0, str.indexOf('.'));
return str;
}
return "";
}
@Override
public Number fromString(final String string) {
return null; // not needed
}
}
|
0
|
java-sources/ai/libs/jaicore-algorithminspector/0.2.7/ai/libs/jaicore
|
java-sources/ai/libs/jaicore-algorithminspector/0.2.7/ai/libs/jaicore/graphvisualizer/SeismicColorMap.java
|
package ai.libs.jaicore.graphvisualizer;
import java.awt.Color;
import java.awt.FlowLayout;
import java.awt.image.BufferedImage;
import javax.swing.ImageIcon;
import javax.swing.JFrame;
import javax.swing.JLabel;
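/**
* Color map that fades from blue for values near the minimum over white at the center of the interval
* to red for values near the maximum, similar to a seismic color scheme. The main method renders the
* gradient into a small Swing window for visual inspection.
*/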
public class SeismicColorMap implements IColorMap {
@Override
public Color get(final double min, final double max, final double v) {
final float hBlue = .75f;
final float hRed = 0f;
final float brightness = 1f;
double half = (max + min) / 2.0;
double spectrumToHalf = Math.abs(max - half);
double vAdjusted = Math.min(Math.max(min, v), max);
float relDistanceToHalf = (float)(Math.abs(vAdjusted - half) / spectrumToHalf);
if (vAdjusted < half) {
return Color.getHSBColor(hBlue, relDistanceToHalf, brightness);
}
else {
return Color.getHSBColor(hRed, relDistanceToHalf, brightness);
}
}
public static void main(final String[] args) {
double min = 0;
double max = 1;
SeismicColorMap cm = new SeismicColorMap();
BufferedImage image = new BufferedImage(500/*Width*/, 100/*height*/, BufferedImage.TYPE_INT_ARGB);
for (int i = 0; i < 100; i++) {
double v = min + (i * 1.0 / 100) * (max - min);
Color c = cm.get(min, max, v);
for (int j = 0; j < 100; j++) {
for (int k = 0; k < 5; k ++) {
image.setRGB(i * 5 + k, j, c.getRGB());
}
}
}
JFrame frame = new JFrame();
frame.getContentPane().setLayout(new FlowLayout());
frame.getContentPane().add(new JLabel(new ImageIcon(image)));
frame.pack();
frame.setVisible(true);
}
}
|
0
|
java-sources/ai/libs/jaicore-algorithminspector/0.2.7/ai/libs/jaicore/graphvisualizer/events
|
java-sources/ai/libs/jaicore-algorithminspector/0.2.7/ai/libs/jaicore/graphvisualizer/events/graph/GraphEvent.java
|
package ai.libs.jaicore.graphvisualizer.events.graph;
import org.api4.java.algorithm.events.IAlgorithmEvent;
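/**
* Marker interface for algorithm events that describe changes of a graph, e.g., newly added nodes.
*/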
public interface GraphEvent extends IAlgorithmEvent {
}
|
0
|
java-sources/ai/libs/jaicore-algorithminspector/0.2.7/ai/libs/jaicore/graphvisualizer/events
|
java-sources/ai/libs/jaicore-algorithminspector/0.2.7/ai/libs/jaicore/graphvisualizer/events/graph/GraphInitializedEvent.java
|
package ai.libs.jaicore.graphvisualizer.events.graph;
import org.api4.java.algorithm.IAlgorithm;
import ai.libs.jaicore.basic.algorithm.AAlgorithmEvent;
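/**
* Event signaling that a graph has been initialized with the given root node.
*/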
public class GraphInitializedEvent<T> extends AAlgorithmEvent implements GraphEvent {
private T root;
public GraphInitializedEvent(final IAlgorithm<?, ?> algorithm, final T root) {
super(algorithm);
this.root = root;
}
public T getRoot() {
return this.root;
}
public void setRoot(final T root) {
this.root = root;
}
}
|
0
|
java-sources/ai/libs/jaicore-algorithminspector/0.2.7/ai/libs/jaicore/graphvisualizer/events
|
java-sources/ai/libs/jaicore-algorithminspector/0.2.7/ai/libs/jaicore/graphvisualizer/events/graph/NodeAddedEvent.java
|
package ai.libs.jaicore.graphvisualizer.events.graph;
import java.util.Objects;
import org.api4.java.algorithm.IAlgorithm;
import ai.libs.jaicore.basic.algorithm.AAlgorithmEvent;
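/**
* Event signaling that a new node has been attached to the given parent node; the type string can be
* used to distinguish different kinds of nodes.
*/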
public class NodeAddedEvent<T> extends AAlgorithmEvent implements GraphEvent {
private final T parent;
private final T node;
private final String type;
public NodeAddedEvent(final IAlgorithm<?, ?> algorithm, final T parent, final T node, final String type) {
super(algorithm);
Objects.requireNonNull(parent, "The parent must not be null. If this is the first node, use " + GraphInitializedEvent.class.getName());
Objects.requireNonNull(node, "The new node must not be null!");
this.parent = parent;
this.node = node;
this.type = type;
}
public T getParent() {
return this.parent;
}
public T getNode() {
return this.node;
}
public String getType() {
return this.type;
}
}
|
0
|
java-sources/ai/libs/jaicore-algorithminspector/0.2.7/ai/libs/jaicore/graphvisualizer/events
|
java-sources/ai/libs/jaicore-algorithminspector/0.2.7/ai/libs/jaicore/graphvisualizer/events/graph/NodeInfoAlteredEvent.java
|
package ai.libs.jaicore.graphvisualizer.events.graph;
import org.api4.java.algorithm.IAlgorithm;
import ai.libs.jaicore.basic.algorithm.AAlgorithmEvent;
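/**
* Event signaling that the information attached to an existing node has changed.
*/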
public class NodeInfoAlteredEvent<T> extends AAlgorithmEvent implements GraphEvent {
private final T node;
public NodeInfoAlteredEvent(final IAlgorithm<?, ?> algorithm, final T node) {
super(algorithm);
this.node = node;
}
public T getNode() {
return this.node;
}
}
|