index
int64 | repo_id
string | file_path
string | content
string |
|---|---|---|---|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/featuregen/GeneratorFactory.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.featuregen;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import opennlp.tools.dictionary.Dictionary;
import opennlp.tools.postag.POSModel;
import opennlp.tools.util.InvalidFormatException;
import opennlp.tools.util.XmlUtil;
import opennlp.tools.util.ext.ExtensionLoader;
import opennlp.tools.util.model.ArtifactSerializer;
import opennlp.tools.util.model.DictionarySerializer;
import opennlp.tools.util.model.POSModelSerializer;
/**
* Creates a set of feature generators based on a provided XML descriptor.
*
* Example of an XML descriptor:
*<p>
* <generators>
* <charngram min = "2" max = "5"/>
* <definition/>
* <cache>
* <window prevLength = "3" nextLength = "3">
* <generators>
* <prevmap/>
* <sentence/>
* <tokenclass/>
* <tokenpattern/>
* </generators>
* </window>
* </cache>
* </generators>
* </p>
*
 * Each XML element is mapped to a {@link GeneratorFactory.XmlFeatureGeneratorFactory} which
 * is responsible for processing the element and creating the specified
 * {@link AdaptiveFeatureGenerator}. Elements can contain other
 * elements; in this case it is the responsibility of the mapped factory to process
 * the child elements correctly. In some factories this leads to recursive
 * calls to the
 * {@link GeneratorFactory.XmlFeatureGeneratorFactory#create(Element, FeatureGeneratorResourceProvider)}
 * method.
*
 * In the example above the generators element is mapped to the
 * {@link GeneratorFactory.AggregatedFeatureGeneratorFactory}, which then
 * creates all the aggregated {@link AdaptiveFeatureGenerator}s. To
 * accomplish this it evaluates the mapping with the same mechanism
 * and gives the child elements to the corresponding factories. All
 * created generators are added to a new instance of the
 * {@link AggregatedFeatureGenerator} which is then returned.
*/
public class GeneratorFactory {
/**
 * The {@link XmlFeatureGeneratorFactory} is responsible for constructing
 * an {@link AdaptiveFeatureGenerator} from a given XML {@link Element}
 * which contains all necessary configuration, if any.
 */
public interface XmlFeatureGeneratorFactory {
/**
 * Creates an {@link AdaptiveFeatureGenerator} from the describing
 * XML element.
 *
 * @param generatorElement the element which contains the configuration
 * @param resourceManager the resource manager which could be used
 * to access referenced resources; may be {@code null} (e.g. during
 * serializer extraction, see {@code extractArtifactSerializerMappings})
 *
 * @return the configured {@link AdaptiveFeatureGenerator}
 *
 * @throws InvalidFormatException if the element is malformed or references
 * a resource that is missing or of the wrong type
 */
AdaptiveFeatureGenerator create(Element generatorElement,
FeatureGeneratorResourceProvider resourceManager) throws InvalidFormatException;
}
/**
 * @see AggregatedFeatureGenerator
 */
static class AggregatedFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {

  /**
   * Builds one child generator per nested XML element and bundles all of
   * them into a single {@link AggregatedFeatureGenerator}.
   */
  public AdaptiveFeatureGenerator create(Element generatorElement,
      FeatureGeneratorResourceProvider resourceManager) throws InvalidFormatException {

    List<AdaptiveFeatureGenerator> generators = new ArrayList<>();

    NodeList children = generatorElement.getChildNodes();
    for (int index = 0; index < children.getLength(); index++) {
      Node node = children.item(index);
      // Skip text/comment nodes; only element children describe generators.
      if (node instanceof Element) {
        generators.add(GeneratorFactory.createGenerator((Element) node, resourceManager));
      }
    }

    return new AggregatedFeatureGenerator(
        generators.toArray(new AdaptiveFeatureGenerator[generators.size()]));
  }

  static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
    factoryMap.put("generators", new AggregatedFeatureGeneratorFactory());
  }
}
/**
 * @see CachedFeatureGenerator
 */
static class CachedFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {

  private CachedFeatureGeneratorFactory() {
  }

  /**
   * Wraps the first nested generator element in a {@link CachedFeatureGenerator}.
   */
  public AdaptiveFeatureGenerator create(Element generatorElement,
      FeatureGeneratorResourceProvider resourceManager) throws InvalidFormatException {

    // Locate the first element child; any further children are ignored.
    Element nested = null;
    NodeList children = generatorElement.getChildNodes();
    for (int index = 0; index < children.getLength() && nested == null; index++) {
      Node node = children.item(index);
      if (node instanceof Element) {
        nested = (Element) node;
      }
    }

    if (nested == null) {
      throw new InvalidFormatException("Could not find containing generator element!");
    }

    return new CachedFeatureGenerator(
        GeneratorFactory.createGenerator(nested, resourceManager));
  }

  static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
    factoryMap.put("cache", new CachedFeatureGeneratorFactory());
  }
}
/**
 * @see CharacterNgramFeatureGenerator
 */
static class CharacterNgramFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {

  /**
   * Creates a {@link CharacterNgramFeatureGenerator} configured from the
   * mandatory integer attributes {@code min} and {@code max}.
   *
   * @throws InvalidFormatException if either attribute is not a valid integer
   */
  public AdaptiveFeatureGenerator create(Element generatorElement,
      FeatureGeneratorResourceProvider resourceManager) throws InvalidFormatException {

    int min = parseIntAttribute(generatorElement, "min");
    int max = parseIntAttribute(generatorElement, "max");

    return new CharacterNgramFeatureGenerator(min, max);
  }

  // Parses a required integer attribute, converting number format errors
  // into descriptor format errors (message matches the original per-attribute
  // messages: "<name> attribute '<value>' is not a number!").
  private static int parseIntAttribute(Element element, String name)
      throws InvalidFormatException {
    String value = element.getAttribute(name);
    try {
      return Integer.parseInt(value);
    } catch (NumberFormatException e) {
      throw new InvalidFormatException(name + " attribute '" + value + "' is not a number!", e);
    }
  }

  static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
    factoryMap.put("charngram", new CharacterNgramFeatureGeneratorFactory());
  }
}
/**
 * Factory for the "definition" element; the created generator emits the
 * constant outcome-prior feature.
 *
 * @see OutcomePriorFeatureGenerator
 */
static class DefinitionFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {
private static final String ELEMENT_NAME = "definition";
private DefinitionFeatureGeneratorFactory() {
}
// Configuration-free: the element carries no attributes, a fresh
// OutcomePriorFeatureGenerator is always returned.
public AdaptiveFeatureGenerator create(Element generatorElement,
FeatureGeneratorResourceProvider resourceManager) throws InvalidFormatException {
return new OutcomePriorFeatureGenerator();
}
static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
factoryMap.put(ELEMENT_NAME, new DefinitionFeatureGeneratorFactory());
}
}
/**
 * @see DictionaryFeatureGenerator
 */
static class DictionaryFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {

  /**
   * Resolves the {@code dict} resource and wraps it, together with the
   * {@code prefix} attribute, in a {@link DictionaryFeatureGenerator}.
   */
  public AdaptiveFeatureGenerator create(Element generatorElement,
      FeatureGeneratorResourceProvider resourceManager) throws InvalidFormatException {

    String dictResourceKey = generatorElement.getAttribute("dict");
    String prefix = generatorElement.getAttribute("prefix");

    Object resource = resourceManager.getResource(dictResourceKey);
    if (!(resource instanceof Dictionary)) {
      throw new InvalidFormatException("No dictionary resource for key: " + dictResourceKey);
    }

    return new DictionaryFeatureGenerator(prefix, (Dictionary) resource);
  }

  static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
    factoryMap.put("dictionary", new DictionaryFeatureGeneratorFactory());
  }
}
/**
 * @see DocumentBeginFeatureGenerator
 */
static class DocumentBeginFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {

  // Stateless element: no attributes or resources are read.
  public AdaptiveFeatureGenerator create(Element generatorElement,
      FeatureGeneratorResourceProvider resourceManager) {
    return new DocumentBeginFeatureGenerator();
  }

  static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
    factoryMap.put("docbegin", new DocumentBeginFeatureGeneratorFactory());
  }
}
/**
 * Defines a word cluster generator factory. It handles elements registered
 * under the tag name "wordcluster"; the referenced cluster dictionaries are
 * typically produced by word2vec or Clark POS induction systems.
 */
static class WordClusterFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {

  public AdaptiveFeatureGenerator create(Element generatorElement,
      FeatureGeneratorResourceProvider resourceManager) throws InvalidFormatException {

    String dictResourceKey = generatorElement.getAttribute("dict");
    // Only the exact string "true" enables lower-casing; anything else
    // (including an absent attribute) leaves it disabled.
    boolean lowerCaseDictionary = "true".equals(generatorElement.getAttribute("lowerCase"));

    Object resource = resourceManager.getResource(dictResourceKey);
    if (resource instanceof WordClusterDictionary) {
      return new WordClusterFeatureGenerator((WordClusterDictionary) resource,
          dictResourceKey, lowerCaseDictionary);
    }

    throw new InvalidFormatException("Not a WordClusterDictionary resource for key: "
        + dictResourceKey);
  }

  static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
    factoryMap.put("wordcluster", new WordClusterFeatureGeneratorFactory());
  }
}
/**
 * Generates Brown clustering features for the current token.
 */
static class BrownClusterTokenFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {

  public AdaptiveFeatureGenerator create(Element generatorElement,
      FeatureGeneratorResourceProvider resourceManager) throws InvalidFormatException {

    String dictResourceKey = generatorElement.getAttribute("dict");

    Object resource = resourceManager.getResource(dictResourceKey);
    if (resource instanceof BrownCluster) {
      return new BrownTokenFeatureGenerator((BrownCluster) resource);
    }

    throw new InvalidFormatException("Not a BrownLexicon resource for key: " + dictResourceKey);
  }

  static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
    factoryMap.put("brownclustertoken", new BrownClusterTokenFeatureGeneratorFactory());
  }
}
/**
 * Generates Brown clustering features for token classes.
 */
static class BrownClusterTokenClassFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {

  public AdaptiveFeatureGenerator create(Element generatorElement,
      FeatureGeneratorResourceProvider resourceManager) throws InvalidFormatException {

    String dictResourceKey = generatorElement.getAttribute("dict");

    Object resource = resourceManager.getResource(dictResourceKey);
    if (resource instanceof BrownCluster) {
      return new BrownTokenClassFeatureGenerator((BrownCluster) resource);
    }

    throw new InvalidFormatException("Not a BrownLexicon resource for key: " + dictResourceKey);
  }

  static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
    factoryMap.put("brownclustertokenclass", new BrownClusterTokenClassFeatureGeneratorFactory());
  }
}
/**
 * Generates Brown clustering features for token bigrams.
 */
static class BrownClusterBigramFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {

  public AdaptiveFeatureGenerator create(Element generatorElement,
      FeatureGeneratorResourceProvider resourceManager) throws InvalidFormatException {

    String dictResourceKey = generatorElement.getAttribute("dict");

    Object resource = resourceManager.getResource(dictResourceKey);
    if (resource instanceof BrownCluster) {
      return new BrownBigramFeatureGenerator((BrownCluster) resource);
    }

    throw new InvalidFormatException("Not a BrownLexicon resource for key: " + dictResourceKey);
  }

  static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
    factoryMap.put("brownclusterbigram", new BrownClusterBigramFeatureGeneratorFactory());
  }
}
/**
 * @see PreviousMapFeatureGenerator
 */
static class PreviousMapFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {

  // The "prevmap" element carries no configuration.
  public AdaptiveFeatureGenerator create(Element generatorElement,
      FeatureGeneratorResourceProvider resourceManager) {
    return new PreviousMapFeatureGenerator();
  }

  static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
    factoryMap.put("prevmap", new PreviousMapFeatureGeneratorFactory());
  }
}
// TODO: Add parameters ...
/**
 * @see SentenceFeatureGenerator
 */
static class SentenceFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {

  public AdaptiveFeatureGenerator create(Element generatorElement,
      FeatureGeneratorResourceProvider resourceManager) {

    // Both flags default to true; an absent attribute comes back as the
    // empty string from getAttribute.
    String begin = generatorElement.getAttribute("begin");
    String end = generatorElement.getAttribute("end");

    boolean beginFeature = begin.isEmpty() || Boolean.parseBoolean(begin);
    boolean endFeature = end.isEmpty() || Boolean.parseBoolean(end);

    return new SentenceFeatureGenerator(beginFeature, endFeature);
  }

  static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
    factoryMap.put("sentence", new SentenceFeatureGeneratorFactory());
  }
}
/**
 * @see TokenClassFeatureGenerator
 */
static class TokenClassFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {

  public AdaptiveFeatureGenerator create(Element generatorElement,
      FeatureGeneratorResourceProvider resourceManager) {

    String attribute = generatorElement.getAttribute("wordAndClass");

    // An absent attribute defaults to true; any present value other than
    // "true" (case-insensitive) disables the combined word+class feature.
    boolean generateWordAndClassFeature =
        attribute.isEmpty() || "true".equalsIgnoreCase(attribute);

    return new TokenClassFeatureGenerator(generateWordAndClassFeature);
  }

  static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
    factoryMap.put("tokenclass", new TokenClassFeatureGeneratorFactory());
  }
}
/**
 * @see TokenFeatureGenerator
 */
static class TokenFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {

  // The "token" element carries no configuration.
  public AdaptiveFeatureGenerator create(Element generatorElement,
      FeatureGeneratorResourceProvider resourceManager) {
    return new TokenFeatureGenerator();
  }

  static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
    factoryMap.put("token", new TokenFeatureGeneratorFactory());
  }
}
/**
 * @see BigramNameFeatureGenerator
 */
static class BigramNameFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {

  // The "bigram" element carries no configuration.
  public AdaptiveFeatureGenerator create(Element generatorElement,
      FeatureGeneratorResourceProvider resourceManager) {
    return new BigramNameFeatureGenerator();
  }

  static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
    factoryMap.put("bigram", new BigramNameFeatureGeneratorFactory());
  }
}
/**
 * @see TokenPatternFeatureGenerator
 */
static class TokenPatternFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {

  // The "tokenpattern" element carries no configuration.
  public AdaptiveFeatureGenerator create(Element generatorElement,
      FeatureGeneratorResourceProvider resourceManager) {
    return new TokenPatternFeatureGenerator();
  }

  static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
    factoryMap.put("tokenpattern", new TokenPatternFeatureGeneratorFactory());
  }
}
/**
 * @see PosTaggerFeatureGenerator
 */
static class PosTaggerFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {

  // The "postagger" element carries no configuration.
  public AdaptiveFeatureGenerator create(Element generatorElement,
      FeatureGeneratorResourceProvider resourceManager) {
    return new PosTaggerFeatureGenerator();
  }

  static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
    factoryMap.put("postagger", new PosTaggerFeatureGeneratorFactory());
  }
}
/**
 * @see WindowFeatureGenerator
 */
static class WindowFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {

  /**
   * Creates a {@link WindowFeatureGenerator} around the first nested
   * generator element, sized by the mandatory integer attributes
   * {@code prevLength} and {@code nextLength}.
   *
   * @throws InvalidFormatException if no nested element exists or either
   * length attribute is not a valid integer
   */
  public AdaptiveFeatureGenerator create(Element generatorElement,
      FeatureGeneratorResourceProvider resourceManager) throws InvalidFormatException {

    // Only the first element child is wrapped; further children are ignored.
    Element nestedGeneratorElement = null;
    NodeList kids = generatorElement.getChildNodes();
    for (int i = 0; i < kids.getLength(); i++) {
      Node childNode = kids.item(i);
      if (childNode instanceof Element) {
        nestedGeneratorElement = (Element) childNode;
        break;
      }
    }

    if (nestedGeneratorElement == null) {
      throw new InvalidFormatException("window feature generator must contain" +
          " an aggregator element");
    }

    AdaptiveFeatureGenerator nestedGenerator =
        GeneratorFactory.createGenerator(nestedGeneratorElement, resourceManager);

    int prevLength = parseIntAttribute(generatorElement, "prevLength");
    int nextLength = parseIntAttribute(generatorElement, "nextLength");

    return new WindowFeatureGenerator(nestedGenerator, prevLength, nextLength);
  }

  // Parses a required integer attribute, converting number format errors
  // into descriptor format errors (message matches the original per-attribute
  // messages: "<name> attribute '<value>' is not a number!").
  private static int parseIntAttribute(Element element, String name)
      throws InvalidFormatException {
    String value = element.getAttribute(name);
    try {
      return Integer.parseInt(value);
    } catch (NumberFormatException e) {
      throw new InvalidFormatException(name + " attribute '" + value
          + "' is not a number!", e);
    }
  }

  static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
    factoryMap.put("window", new WindowFeatureGeneratorFactory());
  }
}
/**
 * @see PrefixFeatureGenerator
 */
static class PrefixFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {

  /**
   * Creates a {@link PrefixFeatureGenerator}; the optional {@code length}
   * attribute overrides {@code PrefixFeatureGenerator.DEFAULT_MAX_LENGTH}.
   *
   * @throws InvalidFormatException if {@code length} is present but not a number
   */
  public AdaptiveFeatureGenerator create(Element generatorElement,
      FeatureGeneratorResourceProvider resourceManager) throws InvalidFormatException {

    String attribute = generatorElement.getAttribute("length");

    int prefixLength = PrefixFeatureGenerator.DEFAULT_MAX_LENGTH;
    if (!attribute.isEmpty()) {
      try {
        prefixLength = Integer.parseInt(attribute);
      } catch (NumberFormatException e) {
        // Report malformed descriptors as InvalidFormatException, consistent
        // with the other factories, instead of leaking NumberFormatException.
        throw new InvalidFormatException(
            "length attribute '" + attribute + "' is not a number!", e);
      }
    }

    return new PrefixFeatureGenerator(prefixLength);
  }

  static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
    factoryMap.put("prefix", new PrefixFeatureGeneratorFactory());
  }
}
/**
 * @see SuffixFeatureGenerator
 */
static class SuffixFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {

  /**
   * Creates a {@link SuffixFeatureGenerator}; the optional {@code length}
   * attribute overrides {@code SuffixFeatureGenerator.DEFAULT_MAX_LENGTH}.
   *
   * @throws InvalidFormatException if {@code length} is present but not a number
   */
  public AdaptiveFeatureGenerator create(Element generatorElement,
      FeatureGeneratorResourceProvider resourceManager) throws InvalidFormatException {

    String attribute = generatorElement.getAttribute("length");

    int suffixLength = SuffixFeatureGenerator.DEFAULT_MAX_LENGTH;
    if (!attribute.isEmpty()) {
      try {
        suffixLength = Integer.parseInt(attribute);
      } catch (NumberFormatException e) {
        // Report malformed descriptors as InvalidFormatException, consistent
        // with the other factories, instead of leaking NumberFormatException.
        throw new InvalidFormatException(
            "length attribute '" + attribute + "' is not a number!", e);
      }
    }

    return new SuffixFeatureGenerator(suffixLength);
  }

  static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
    factoryMap.put("suffix", new SuffixFeatureGeneratorFactory());
  }
}
/**
 * @see POSTaggerNameFeatureGenerator
 */
static class POSTaggerNameFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {

  /**
   * Resolves the {@code model} resource and wraps it in a
   * {@link POSTaggerNameFeatureGenerator}.
   *
   * @throws InvalidFormatException if the resolved resource is missing or
   * not a {@link POSModel} (previously this surfaced as a raw
   * ClassCastException)
   */
  public AdaptiveFeatureGenerator create(Element generatorElement,
      FeatureGeneratorResourceProvider resourceManager)
      throws InvalidFormatException {

    String modelResourceKey = generatorElement.getAttribute("model");

    Object modelResource = resourceManager.getResource(modelResourceKey);
    // Validate the resource type explicitly, consistent with the dictionary
    // and cluster factories above.
    if (!(modelResource instanceof POSModel)) {
      throw new InvalidFormatException("Not a POSModel resource for key: " + modelResourceKey);
    }

    return new POSTaggerNameFeatureGenerator((POSModel) modelResource);
  }

  static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
    factoryMap.put("tokenpos", new POSTaggerNameFeatureGeneratorFactory());
  }
}
// TODO: We have to support custom resources here. How does it work ?!
// Attributes get into a Map<String, String> properties
// How can serialization be supported ?!
// The model is loaded, and the manifest should contain all serializer classes registered for the
// resources by name.
// When training, the descriptor could be consulted first to register the serializers, and afterwards
// they are stored in the model.
static class CustomFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {

  /**
   * Instantiates the generator class named by the {@code class} attribute.
   * For {@link CustomFeatureGenerator} instances every other attribute is
   * handed over as a property via {@code init}.
   */
  public AdaptiveFeatureGenerator create(Element generatorElement,
      FeatureGeneratorResourceProvider resourceManager) throws InvalidFormatException {

    String featureGeneratorClassName = generatorElement.getAttribute("class");

    AdaptiveFeatureGenerator generator = ExtensionLoader.instantiateExtension(
        AdaptiveFeatureGenerator.class, featureGeneratorClassName);

    if (generator instanceof CustomFeatureGenerator) {
      // Collect all attributes except "class" as configuration properties.
      Map<String, String> properties = new HashMap<>();
      NamedNodeMap attributes = generatorElement.getAttributes();
      for (int index = 0; index < attributes.getLength(); index++) {
        Node attribute = attributes.item(index);
        String attributeName = attribute.getNodeName();
        if (!"class".equals(attributeName)) {
          properties.put(attributeName, attribute.getNodeValue());
        }
      }

      // No resource provider means we are only inspecting the descriptor
      // (e.g. serializer extraction); initialization is skipped then.
      if (resourceManager != null) {
        ((CustomFeatureGenerator) generator).init(properties, resourceManager);
      }
    }

    return generator;
  }

  static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
    factoryMap.put("custom", new CustomFeatureGeneratorFactory());
  }
}
// Registry mapping XML element tag names to the factories that build the
// corresponding feature generators. Populated exactly once at class load
// time and never modified afterwards, hence final.
private static final Map<String, XmlFeatureGeneratorFactory> factories = new HashMap<>();

static {
  AggregatedFeatureGeneratorFactory.register(factories);
  CachedFeatureGeneratorFactory.register(factories);
  CharacterNgramFeatureGeneratorFactory.register(factories);
  DefinitionFeatureGeneratorFactory.register(factories);
  DictionaryFeatureGeneratorFactory.register(factories);
  DocumentBeginFeatureGeneratorFactory.register(factories);
  PreviousMapFeatureGeneratorFactory.register(factories);
  SentenceFeatureGeneratorFactory.register(factories);
  TokenClassFeatureGeneratorFactory.register(factories);
  TokenFeatureGeneratorFactory.register(factories);
  BigramNameFeatureGeneratorFactory.register(factories);
  TokenPatternFeatureGeneratorFactory.register(factories);
  PosTaggerFeatureGeneratorFactory.register(factories);
  PrefixFeatureGeneratorFactory.register(factories);
  SuffixFeatureGeneratorFactory.register(factories);
  WindowFeatureGeneratorFactory.register(factories);
  WordClusterFeatureGeneratorFactory.register(factories);
  BrownClusterTokenFeatureGeneratorFactory.register(factories);
  BrownClusterTokenClassFeatureGeneratorFactory.register(factories);
  BrownClusterBigramFeatureGeneratorFactory.register(factories);
  CustomFeatureGeneratorFactory.register(factories);
  POSTaggerNameFeatureGeneratorFactory.register(factories);
}
/**
 * Creates a {@link AdaptiveFeatureGenerator} for the provided element.
 * To accomplish this it looks up the corresponding factory by the
 * element tag name. The factory is then responsible for the creation
 * of the generator from the element.
 *
 * @param generatorElement the XML element whose tag name selects the factory
 * @param resourceManager the resource provider handed through to the factory;
 * may be null (e.g. during serializer extraction)
 *
 * @return the generator built by the factory registered for the tag name
 *
 * @throws InvalidFormatException if no factory is registered for the element's
 * tag name, or if the selected factory rejects the element
 */
static AdaptiveFeatureGenerator createGenerator(Element generatorElement,
FeatureGeneratorResourceProvider resourceManager) throws InvalidFormatException {
String elementName = generatorElement.getTagName();
// Lookup the factory registered for this tag name in the static registry.
XmlFeatureGeneratorFactory generatorFactory = factories.get(elementName);
if (generatorFactory == null) {
throw new InvalidFormatException("Unexpected element: " + elementName);
}
return generatorFactory.create(generatorElement, resourceManager);
}
/**
 * Parses the descriptor stream into a DOM document, reporting XML parse
 * failures as {@link InvalidFormatException} (a subclass of IOException).
 */
private static org.w3c.dom.Document createDOM(InputStream xmlDescriptorIn)
    throws IOException {
  DocumentBuilder builder = XmlUtil.createDocumentBuilder();
  try {
    return builder.parse(xmlDescriptorIn);
  } catch (SAXException e) {
    throw new InvalidFormatException("Descriptor is not valid XML!", e);
  }
}
/**
 * Creates an {@link AdaptiveFeatureGenerator} from a provided XML descriptor.
 *
 * Usually this XML descriptor contains a set of nested feature generators
 * which are then used to generate the features by one of the opennlp
 * components.
 *
 * @param xmlDescriptorIn the {@link InputStream} from which the descriptor
 * is read; the stream remains open and must be closed by the caller
 * @param resourceManager the resource manager which is used to resolve resources
 * referenced by a key in the descriptor
 *
 * @return the created feature generator
 *
 * @throws IOException if an error occurs during reading from the descriptor
 * {@link InputStream}
 */
public static AdaptiveFeatureGenerator create(InputStream xmlDescriptorIn,
    FeatureGeneratorResourceProvider resourceManager) throws IOException {
  Element rootElement = createDOM(xmlDescriptorIn).getDocumentElement();
  return createGenerator(rootElement, resourceManager);
}
/**
 * Extracts the artifact serializer mappings referenced by a feature
 * generator descriptor.
 *
 * <p>Custom generators implementing {@link ArtifactToSerializerMapper}
 * contribute their own mappings; additionally, every element with a
 * {@code dict} or {@code model} attribute whose tag name is known maps
 * the resource name to the matching serializer.
 *
 * @param xmlDescriptorIn the descriptor stream; it remains open and must
 * be closed by the caller
 * @return a map from resource name to {@link ArtifactSerializer}
 * @throws IOException if the descriptor cannot be read or is not valid XML
 */
public static Map<String, ArtifactSerializer<?>> extractArtifactSerializerMappings(
    InputStream xmlDescriptorIn) throws IOException {

  Map<String, ArtifactSerializer<?>> mapping = new HashMap<>();

  org.w3c.dom.Document xmlDescriptorDOM = createDOM(xmlDescriptorIn);
  XPath xPath = XPathFactory.newInstance().newXPath();

  NodeList customElements;
  try {
    customElements = (NodeList) xPath.compile("//custom")
        .evaluate(xmlDescriptorDOM.getDocumentElement(), XPathConstants.NODESET);
  } catch (XPathExpressionException e) {
    throw new IllegalStateException("The hard coded XPath expression should always be valid!");
  }

  for (int i = 0; i < customElements.getLength(); i++) {
    if (customElements.item(i) instanceof Element) {
      Element customElement = (Element) customElements.item(i);

      // Note: The resource provider is not available at that point, to provide
      // resources they need to be loaded first!
      AdaptiveFeatureGenerator generator = createGenerator(customElement, null);

      if (generator instanceof ArtifactToSerializerMapper) {
        mapping.putAll(((ArtifactToSerializerMapper) generator).getArtifactSerializerMapping());
      }
    }
  }

  NodeList allElements;
  try {
    allElements = (NodeList) xPath.compile("//*")
        .evaluate(xmlDescriptorDOM.getDocumentElement(), XPathConstants.NODESET);
  } catch (XPathExpressionException e) {
    throw new IllegalStateException("The hard coded XPath expression should always be valid!");
  }

  for (int i = 0; i < allElements.getLength(); i++) {
    if (allElements.item(i) instanceof Element) {
      Element xmlElement = (Element) allElements.item(i);

      // Element.getAttribute returns the empty string (never null) for an
      // absent attribute, so test for emptiness; the previous null check
      // was always true and mapped absent attributes under the "" key.
      String dictName = xmlElement.getAttribute("dict");
      if (!dictName.isEmpty()) {
        switch (xmlElement.getTagName()) {
          case "wordcluster":
            mapping.put(dictName, new WordClusterDictionary.WordClusterDictionarySerializer());
            break;
          case "brownclustertoken":
          case "brownclustertokenclass":
          case "brownclusterbigram":
            mapping.put(dictName, new BrownCluster.BrownClusterSerializer());
            break;
          case "dictionary":
            mapping.put(dictName, new DictionarySerializer());
            break;
          default:
            break;
        }
      }

      String modelName = xmlElement.getAttribute("model");
      if (!modelName.isEmpty() && "tokenpos".equals(xmlElement.getTagName())) {
        mapping.put(modelName, new POSModelSerializer());
      }
    }
  }
  return mapping;
}
/**
 * Provides a list with all the elements in the xml feature descriptor.
 *
 * @param xmlDescriptorIn the xml feature descriptor
 * @return a list containing all elements
 * @throws IOException if inputstream cannot be open
 * @throws InvalidFormatException if xml is not well-formed
 */
public static List<Element> getDescriptorElements(InputStream xmlDescriptorIn)
    throws IOException {

  org.w3c.dom.Document descriptorDOM = createDOM(xmlDescriptorIn);

  NodeList allElements;
  try {
    XPath xPath = XPathFactory.newInstance().newXPath();
    allElements = (NodeList) xPath.evaluate("//*",
        descriptorDOM.getDocumentElement(), XPathConstants.NODESET);
  } catch (XPathExpressionException e) {
    throw new IllegalStateException("The hard coded XPath expression should always be valid!");
  }

  List<Element> elements = new ArrayList<>();
  for (int index = 0; index < allElements.getLength(); index++) {
    Node node = allElements.item(index);
    if (node instanceof Element) {
      elements.add((Element) node);
    }
  }
  return elements;
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/featuregen/InSpanGenerator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.featuregen;
import java.util.List;
import java.util.Objects;
import opennlp.tools.namefind.TokenNameFinder;
import opennlp.tools.util.Span;
/**
 * Generates features if the tokens are recognized by the provided
 * {@link TokenNameFinder}.
 */
public class InSpanGenerator implements AdaptiveFeatureGenerator {

  private final String prefix;

  private final TokenNameFinder finder;

  // Per-sentence cache, compared by array identity: repeated calls with
  // the same token array reuse the previous name finder result.
  private String[] currentSentence;
  private Span[] currentNames;

  /**
   * Initializes the current instance.
   *
   * @param prefix the prefix is used to distinguish the generated features
   * from features generated by other instances of {@link InSpanGenerator}s.
   * @param finder the {@link TokenNameFinder} used to detect the names.
   */
  public InSpanGenerator(String prefix, TokenNameFinder finder) {
    this.prefix = Objects.requireNonNull(prefix, "prefix must not be null");
    this.finder = Objects.requireNonNull(finder, "finder must not be null");
  }

  public void createFeatures(List<String> features, String[] tokens, int index,
      String[] preds) {
    // Run the name finder at most once per sentence (identity check).
    if (currentSentence != tokens) {
      currentSentence = tokens;
      currentNames = finder.find(tokens);
    }

    for (Span name : currentNames) {
      if (!name.contains(index)) {
        continue;
      }
      // The current token lies inside a detected name span.
      features.add(prefix + ":w=dic");
      features.add(prefix + ":w=dic=" + tokens[index]);
      // TODO: consider generation start and continuation features
      break;
    }
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/featuregen/OutcomePriorFeatureGenerator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.featuregen;
import java.util.List;
/**
 * The definition feature maps the underlying distribution of outcomes.
 */
public class OutcomePriorFeatureGenerator implements AdaptiveFeatureGenerator {

  // The constant feature name emitted for every token.
  private static final String OUTCOME_PRIOR_FEATURE = "def";

  public void createFeatures(List<String> features, String[] tokens, int index,
      String[] previousOutcomes) {
    // Emits the same constant feature regardless of the token or position.
    features.add(OUTCOME_PRIOR_FEATURE);
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/featuregen/POSTaggerNameFeatureGenerator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.featuregen;
import java.util.Arrays;
import java.util.List;
import opennlp.tools.postag.POSModel;
import opennlp.tools.postag.POSTagger;
import opennlp.tools.postag.POSTaggerME;
/**
* Adds the token POS Tag as feature. Requires a POS Tag model.
*/
public class POSTaggerNameFeatureGenerator implements AdaptiveFeatureGenerator {
private POSTagger posTagger;
private String[] cachedTokens;
private String[] cachedTags;
/**
* Initializes a new instance.
*
* @param aPosTagger a POSTagger implementation.
*/
public POSTaggerNameFeatureGenerator(POSTagger aPosTagger) {
this.posTagger = aPosTagger;
}
/**
* Initializes a new instance.
*
* @param aPosModel a POSTagger model.
*/
public POSTaggerNameFeatureGenerator(POSModel aPosModel) {
this.posTagger = new POSTaggerME(aPosModel);
}
public void createFeatures(List<String> feats, String[] toks, int index, String[] preds) {
if (!Arrays.equals(this.cachedTokens, toks)) {
this.cachedTokens = toks;
this.cachedTags = this.posTagger.tag(toks);
}
feats.add("pos=" + this.cachedTags[index]);
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/featuregen/PosTaggerFeatureGenerator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.featuregen;
import java.util.List;
/**
 * Generates features from the POS tags of the preceding tokens.
 */
public class PosTaggerFeatureGenerator implements AdaptiveFeatureGenerator {
  /**
   * Adds the previous tag ("t=TAG") when one exists and the previous two tags
   * ("t2=TAG2,TAG1") when two exist.
   *
   * Note: the original implementation also read tokens[index - 1] and
   * tokens[index - 2] into locals and tracked a "S=begin" sentinel, but never
   * emitted either — only the tag checks affected the output, so the dead
   * bookkeeping has been removed.
   */
  @Override
  public void createFeatures(List<String> features, String[] tokens, int index,
      String[] tags) {
    if (index >= 1) {
      // tag of the previous token
      features.add("t=" + tags[index - 1]);
      if (index >= 2) {
        // tags of the two previous tokens
        features.add("t2=" + tags[index - 2] + "," + tags[index - 1]);
      }
    }
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/featuregen/PrefixFeatureGenerator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.featuregen;
import java.util.List;
/**
 * Generates one "pre=PREFIX" feature for every prefix of the token, up to a
 * configurable maximum length.
 */
public class PrefixFeatureGenerator implements AdaptiveFeatureGenerator {
  static final int DEFAULT_MAX_LENGTH = 4;
  private final int prefixLength;
  /** Creates a generator emitting prefixes up to the default maximum length. */
  public PrefixFeatureGenerator() {
    this(DEFAULT_MAX_LENGTH);
  }
  /** Creates a generator emitting prefixes of up to {@code prefixLength} characters. */
  public PrefixFeatureGenerator(int prefixLength) {
    this.prefixLength = prefixLength;
  }
  @Override
  public void createFeatures(List<String> features, String[] tokens, int index,
      String[] previousOutcomes) {
    String token = tokens[index];
    // Shortest prefix first: "pre=t", "pre=to", "pre=tok", ...
    int count = Math.min(prefixLength, token.length());
    for (int len = 1; len <= count; len++) {
      features.add("pre=" + token.substring(0, len));
    }
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/featuregen/PreviousMapFeatureGenerator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.featuregen;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* This {@link AdaptiveFeatureGenerator} generates features indicating the
* outcome associated with a previously occuring word.
*/
public class PreviousMapFeatureGenerator implements AdaptiveFeatureGenerator {
private Map<String, String> previousMap = new HashMap<>();
public void createFeatures(List<String> features, String[] tokens, int index, String[] preds) {
features.add("pd=" + previousMap.get(tokens[index]));
}
/**
* Generates previous decision features for the token based on contents of the previous map.
*/
public void updateAdaptiveData(String[] tokens, String[] outcomes) {
for (int i = 0; i < tokens.length; i++) {
previousMap.put(tokens[i], outcomes[i]);
}
}
/**
* Clears the previous map.
*/
public void clearAdaptiveData() {
previousMap.clear();
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/featuregen/PreviousTwoMapFeatureGenerator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.featuregen;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* This {@link AdaptiveFeatureGenerator} generates features indicating the
* outcome associated with two previously occuring words.
*/
public class PreviousTwoMapFeatureGenerator implements AdaptiveFeatureGenerator {
private Map<String, String> previousMap = new HashMap<>();
/**
* Generates previous decision features for the token based on contents of the previous map.
*/
public void createFeatures(List<String> features, String[] tokens, int index, String[] preds) {
if (index > 0) {
features.add("ppd=" + previousMap.get(tokens[index]) + "," +
previousMap.get(tokens[index - 1]));
}
}
public void updateAdaptiveData(String[] tokens, String[] outcomes) {
for (int i = 0; i < tokens.length; i++) {
previousMap.put(tokens[i], outcomes[i]);
}
}
/**
* Clears the previous map.
*/
public void clearAdaptiveData() {
previousMap.clear();
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/featuregen/SentenceFeatureGenerator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.featuregen;
import java.util.List;
/**
* This feature generator creates sentence begin and end features.
*/
public class SentenceFeatureGenerator implements AdaptiveFeatureGenerator {
private final boolean isGenerateFirstWordFeature;
private final boolean isGenerateLastWordFeature;
public SentenceFeatureGenerator(boolean isGenerateFirstWordFeature,
boolean isGenerateLastWordFeature) {
this.isGenerateFirstWordFeature = isGenerateFirstWordFeature;
this.isGenerateLastWordFeature = isGenerateLastWordFeature;
}
public void createFeatures(List<String> features, String[] tokens, int index,
String[] previousOutcomes) {
if (isGenerateFirstWordFeature && index == 0) {
features.add("S=begin");
}
if (isGenerateLastWordFeature && tokens.length == index + 1) {
features.add("S=end");
}
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/featuregen/StringPattern.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.featuregen;
/**
 * Recognizes predefined patterns in strings.
 */
public class StringPattern {
  private static final int INITAL_CAPITAL_LETTER = 0x1;
  private static final int ALL_CAPITAL_LETTER = 0x1 << 1;
  private static final int ALL_LOWERCASE_LETTER = 0x1 << 2;
  private static final int ALL_LETTERS = 0x1 << 3;
  private static final int ALL_DIGIT = 0x1 << 4;
  private static final int CONTAINS_PERIOD = 0x1 << 5;
  private static final int CONTAINS_COMMA = 0x1 << 6;
  private static final int CONTAINS_SLASH = 0x1 << 7;
  private static final int CONTAINS_DIGIT = 0x1 << 8;
  private static final int CONTAINS_HYPHEN = 0x1 << 9;
  private static final int CONTAINS_LETTERS = 0x1 << 10;
  private static final int CONTAINS_UPPERCASE = 0x1 << 11;
  // Bit set of the recognized properties.
  private final int pattern;
  // Number of decimal digit characters seen in the token.
  private final int digits;
  private StringPattern(int pattern, int digits) {
    this.pattern = pattern;
    this.digits = digits;
  }
  /** @return true if the character type is one of the Unicode letter categories. */
  private static boolean isLetterType(int charType) {
    return charType == Character.UPPERCASE_LETTER
        || charType == Character.LOWERCASE_LETTER
        || charType == Character.TITLECASE_LETTER
        || charType == Character.MODIFIER_LETTER
        || charType == Character.OTHER_LETTER;
  }
  /**
   * Scans the token and records its character-class properties.
   *
   * All "all ..." flags start out set and are cleared by each character that
   * disproves them; all "contains ..." flags start out clear and are set by
   * the first character that proves them.
   *
   * @param token the string to analyze
   * @return an immutable {@link StringPattern} describing the token
   */
  public static StringPattern recognize(String token) {
    int bits = ALL_CAPITAL_LETTER | ALL_LOWERCASE_LETTER | ALL_DIGIT | ALL_LETTERS;
    int digitCount = 0;
    for (int pos = 0; pos < token.length(); pos++) {
      char ch = token.charAt(pos);
      int charType = Character.getType(ch);
      if (isLetterType(charType)) {
        bits |= CONTAINS_LETTERS;
        bits &= ~ALL_DIGIT;
        if (charType == Character.UPPERCASE_LETTER) {
          if (pos == 0) {
            bits |= INITAL_CAPITAL_LETTER;
          }
          bits |= CONTAINS_UPPERCASE;
          bits &= ~ALL_LOWERCASE_LETTER;
        } else {
          bits &= ~ALL_CAPITAL_LETTER;
        }
      } else {
        // A non-letter disproves all of these at once.
        bits &= ~(ALL_LETTERS | ALL_CAPITAL_LETTER | ALL_LOWERCASE_LETTER);
        if (charType == Character.DECIMAL_DIGIT_NUMBER) {
          bits |= CONTAINS_DIGIT;
          digitCount++;
        } else {
          bits &= ~ALL_DIGIT;
        }
        if (ch == ',') {
          bits |= CONTAINS_COMMA;
        } else if (ch == '.') {
          bits |= CONTAINS_PERIOD;
        } else if (ch == '/') {
          bits |= CONTAINS_SLASH;
        } else if (ch == '-') {
          bits |= CONTAINS_HYPHEN;
        }
      }
    }
    return new StringPattern(bits, digitCount);
  }
  /**
   * @return true if all characters are letters.
   */
  public boolean isAllLetter() {
    return (pattern & ALL_LETTERS) != 0;
  }
  /**
   * @return true if first letter is capital.
   */
  public boolean isInitialCapitalLetter() {
    return (pattern & INITAL_CAPITAL_LETTER) != 0;
  }
  /**
   * @return true if all letters are capital.
   */
  public boolean isAllCapitalLetter() {
    return (pattern & ALL_CAPITAL_LETTER) != 0;
  }
  /**
   * @return true if all letters are lower case.
   */
  public boolean isAllLowerCaseLetter() {
    return (pattern & ALL_LOWERCASE_LETTER) != 0;
  }
  /**
   * @return true if all chars are digits.
   */
  public boolean isAllDigit() {
    return (pattern & ALL_DIGIT) != 0;
  }
  /**
   * Retrieves the number of digits.
   */
  public int digits() {
    return digits;
  }
  public boolean containsPeriod() {
    return (pattern & CONTAINS_PERIOD) != 0;
  }
  public boolean containsComma() {
    return (pattern & CONTAINS_COMMA) != 0;
  }
  public boolean containsSlash() {
    return (pattern & CONTAINS_SLASH) != 0;
  }
  public boolean containsDigit() {
    return (pattern & CONTAINS_DIGIT) != 0;
  }
  public boolean containsHyphen() {
    return (pattern & CONTAINS_HYPHEN) != 0;
  }
  public boolean containsLetters() {
    return (pattern & CONTAINS_LETTERS) != 0;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/featuregen/SuffixFeatureGenerator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.featuregen;
import java.util.List;
/**
 * Generates one "suf=SUFFIX" feature for every suffix of the token, up to a
 * configurable maximum length.
 */
public class SuffixFeatureGenerator implements AdaptiveFeatureGenerator {
  static final int DEFAULT_MAX_LENGTH = 4;
  private final int suffixLength;
  /** Creates a generator emitting suffixes up to the default maximum length. */
  public SuffixFeatureGenerator() {
    this(DEFAULT_MAX_LENGTH);
  }
  /** Creates a generator emitting suffixes of up to {@code suffixLength} characters. */
  public SuffixFeatureGenerator(int suffixLength) {
    this.suffixLength = suffixLength;
  }
  @Override
  public void createFeatures(List<String> features, String[] tokens, int index,
      String[] previousOutcomes) {
    String token = tokens[index];
    // Shortest suffix first: "suf=n", "suf=en", "suf=ken", ...
    int count = Math.min(suffixLength, token.length());
    for (int len = 1; len <= count; len++) {
      features.add("suf=" + token.substring(token.length() - len));
    }
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/featuregen/TokenClassFeatureGenerator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.featuregen;
import java.util.List;
import opennlp.tools.util.StringUtil;
/**
* Generates features for different for the class of the token.
*/
public class TokenClassFeatureGenerator implements AdaptiveFeatureGenerator {
private static final String TOKEN_CLASS_PREFIX = "wc";
private static final String TOKEN_AND_CLASS_PREFIX = "w&c";
private boolean generateWordAndClassFeature;
public TokenClassFeatureGenerator() {
this(false);
}
public TokenClassFeatureGenerator(boolean generateWordAndClassFeature) {
this.generateWordAndClassFeature = generateWordAndClassFeature;
}
public void createFeatures(List<String> features, String[] tokens, int index, String[] preds) {
String wordClass = FeatureGeneratorUtil.tokenFeature(tokens[index]);
features.add(TOKEN_CLASS_PREFIX + "=" + wordClass);
if (generateWordAndClassFeature) {
features.add(TOKEN_AND_CLASS_PREFIX + "=" + StringUtil.toLowerCase(tokens[index]) +
"," + wordClass);
}
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/featuregen/TokenFeatureGenerator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.featuregen;
import java.util.List;
import opennlp.tools.util.StringUtil;
/**
* Generates a feature which contains the token itself.
*/
public class TokenFeatureGenerator implements AdaptiveFeatureGenerator {
private static final String WORD_PREFIX = "w";
private boolean lowercase;
public TokenFeatureGenerator(boolean lowercase) {
this.lowercase = lowercase;
}
public TokenFeatureGenerator() {
this(true);
}
public void createFeatures(List<String> features, String[] tokens, int index, String[] preds) {
if (lowercase) {
features.add(WORD_PREFIX + "=" + StringUtil.toLowerCase(tokens[index]));
}
else {
features.add(WORD_PREFIX + "=" + tokens[index]);
}
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/featuregen/TokenPatternFeatureGenerator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.featuregen;
import java.util.List;
import java.util.regex.Pattern;
import opennlp.tools.tokenize.SimpleTokenizer;
import opennlp.tools.tokenize.Tokenizer;
import opennlp.tools.util.StringUtil;
/**
* Partitions tokens into sub-tokens based on character classes and generates
* class features for each of the sub-tokens and combinations of those sub-tokens.
*/
public class TokenPatternFeatureGenerator implements AdaptiveFeatureGenerator {
private Pattern noLetters = Pattern.compile("[^a-zA-Z]");
private Tokenizer tokenizer;
/**
* Initializes a new instance.
* For tokinization the {@link SimpleTokenizer} is used.
*/
public TokenPatternFeatureGenerator() {
this(SimpleTokenizer.INSTANCE);
}
/**
* Initializes a new instance.
*
* @param supportTokenizer
*/
public TokenPatternFeatureGenerator(Tokenizer supportTokenizer) {
tokenizer = supportTokenizer;
}
public void createFeatures(List<String> feats, String[] toks, int index, String[] preds) {
String[] tokenized = tokenizer.tokenize(toks[index]);
if (tokenized.length == 1) {
feats.add("st=" + StringUtil.toLowerCase(toks[index]));
return;
}
feats.add("stn=" + tokenized.length);
StringBuilder pattern = new StringBuilder();
for (int i = 0; i < tokenized.length; i++) {
if (i < tokenized.length - 1) {
feats.add("pt2=" + FeatureGeneratorUtil.tokenFeature(tokenized[i]) +
FeatureGeneratorUtil.tokenFeature(tokenized[i + 1]));
}
if (i < tokenized.length - 2) {
feats.add("pt3=" + FeatureGeneratorUtil.tokenFeature(tokenized[i]) +
FeatureGeneratorUtil.tokenFeature(tokenized[i + 1]) +
FeatureGeneratorUtil.tokenFeature(tokenized[i + 2]));
}
pattern.append(FeatureGeneratorUtil.tokenFeature(tokenized[i]));
if (!noLetters.matcher(tokenized[i]).find()) {
feats.add("st=" + StringUtil.toLowerCase(tokenized[i]));
}
}
feats.add("pta=" + pattern.toString());
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/featuregen/TrigramNameFeatureGenerator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.featuregen;
import java.util.List;
/**
* Adds trigram features based on tokens and token classes.
*
*/
public class TrigramNameFeatureGenerator implements AdaptiveFeatureGenerator {
public void createFeatures(List<String> features, String[] tokens, int index,
String[] previousOutcomes) {
String wc = FeatureGeneratorUtil.tokenFeature(tokens[index]);
// trigram features
if (index > 1) {
features.add("ppw,pw,w=" + tokens[index - 2] + "," + tokens[index - 1] + "," + tokens[index]);
String pwc = FeatureGeneratorUtil.tokenFeature(tokens[index - 1]);
String ppwc = FeatureGeneratorUtil.tokenFeature(tokens[index - 2]);
features.add("ppwc,pwc,wc=" + ppwc + "," + pwc + "," + wc);
}
if (index + 2 < tokens.length) {
features.add("w,nw,nnw=" + tokens[index] + "," + tokens[index + 1] + "," + tokens[index + 2]);
String nwc = FeatureGeneratorUtil.tokenFeature(tokens[index + 1]);
String nnwc = FeatureGeneratorUtil.tokenFeature(tokens[index + 2]);
features.add("wc,nwc,nnwc=" + wc + "," + nwc + "," + nnwc);
}
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/featuregen/WindowFeatureGenerator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.featuregen;
import java.util.ArrayList;
import java.util.List;
/**
* Generates previous and next features for a given {@link AdaptiveFeatureGenerator}.
* The window size can be specified.
*
* Features:
* Current token is always included unchanged
* Previous tokens are prefixed with p distance
* Next tokens are prefix with n distance
*/
public class WindowFeatureGenerator implements AdaptiveFeatureGenerator {
public static final String PREV_PREFIX = "p";
public static final String NEXT_PREFIX = "n";
private final AdaptiveFeatureGenerator generator;
private final int prevWindowSize;
private final int nextWindowSize;
/**
* Initializes the current instance with the given parameters.
*
* @param generator Feature generator to apply to the window.
* @param prevWindowSize Size of the window to the left of the current token.
* @param nextWindowSize Size of the window to the right of the current token.
*/
public WindowFeatureGenerator(AdaptiveFeatureGenerator generator, int prevWindowSize, int nextWindowSize) {
this.generator = generator;
this.prevWindowSize = prevWindowSize;
this.nextWindowSize = nextWindowSize;
}
/**
* Initializes the current instance with the given parameters.
*
* @param prevWindowSize
* @param nextWindowSize
* @param generators
*/
public WindowFeatureGenerator(int prevWindowSize, int nextWindowSize,
AdaptiveFeatureGenerator... generators) {
this(new AggregatedFeatureGenerator(generators), prevWindowSize, nextWindowSize);
}
/**
* Initializes the current instance. The previous and next window size is 5.
*
* @param generator feature generator
*/
public WindowFeatureGenerator(AdaptiveFeatureGenerator generator) {
this(generator, 5, 5);
}
/**
* Initializes the current instance with the given parameters.
*
* @param generators array of feature generators
*/
public WindowFeatureGenerator(AdaptiveFeatureGenerator... generators) {
this(new AggregatedFeatureGenerator(generators), 5, 5);
}
public void createFeatures(List<String> features, String[] tokens, int index, String[] preds) {
// current features
generator.createFeatures(features, tokens, index, preds);
// previous features
for (int i = 1; i < prevWindowSize + 1; i++) {
if (index - i >= 0) {
List<String> prevFeatures = new ArrayList<>();
generator.createFeatures(prevFeatures, tokens, index - i, preds);
for (String prevFeature : prevFeatures) {
features.add(PREV_PREFIX + i + prevFeature);
}
}
}
// next features
for (int i = 1; i < nextWindowSize + 1; i++) {
if (i + index < tokens.length) {
List<String> nextFeatures = new ArrayList<>();
generator.createFeatures(nextFeatures, tokens, index + i, preds);
for (String nextFeature : nextFeatures) {
features.add(NEXT_PREFIX + i + nextFeature);
}
}
}
}
public void updateAdaptiveData(String[] tokens, String[] outcomes) {
generator.updateAdaptiveData(tokens, outcomes);
}
public void clearAdaptiveData() {
generator.clearAdaptiveData();
}
@Override
public String toString() {
return super.toString() + ": Prev window size: " + prevWindowSize
+ ", Next window size: " + nextWindowSize;
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/featuregen/WordClusterDictionary.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.featuregen;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import opennlp.tools.util.model.ArtifactSerializer;
import opennlp.tools.util.model.SerializableArtifact;
/**
 * Maps tokens to cluster identifiers read from word2vec or clark style lexicons.
 */
public class WordClusterDictionary implements SerializableArtifact {
  public static class WordClusterDictionarySerializer implements ArtifactSerializer<WordClusterDictionary> {
    public WordClusterDictionary create(InputStream in) throws IOException {
      return new WordClusterDictionary(in);
    }
    public void serialize(WordClusterDictionary artifact, OutputStream out) throws IOException {
      artifact.serialize(out);
    }
  }
  private final Map<String, String> tokenToClusterMap = new HashMap<>();
  /**
   * Read word2vec and clark clustering style lexicons.
   *
   * @param in the inputstream; read as UTF-8, not closed by this constructor
   * @throws IOException the io exception
   */
  public WordClusterDictionary(InputStream in) throws IOException {
    BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
    String line;
    while ((line = reader.readLine()) != null) {
      String[] parts = line.split(" ");
      // word2vec lines are "token cluster", clark lines are "token cluster freq";
      // the cluster is the second column in both formats, so the original's two
      // identical branches are merged here.
      if (parts.length == 2 || parts.length == 3) {
        tokenToClusterMap.put(parts[0], parts[1].intern());
      }
    }
  }
  /**
   * @return the cluster id for the token, or null if the token is unknown
   */
  public String lookupToken(String string) {
    return tokenToClusterMap.get(string);
  }
  public void serialize(OutputStream out) throws IOException {
    // Write UTF-8 explicitly so serialization matches the constructor's decoding;
    // the original used the platform default charset here, which corrupted
    // non-ASCII tokens on round-trip on non-UTF-8 platforms.
    Writer writer = new BufferedWriter(new OutputStreamWriter(out, StandardCharsets.UTF_8));
    for (Map.Entry<String, String> entry : tokenToClusterMap.entrySet()) {
      writer.write(entry.getKey() + " " + entry.getValue() + "\n");
    }
    writer.flush();
  }
  public Class<?> getArtifactSerializerClass() {
    return WordClusterDictionarySerializer.class;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/featuregen/WordClusterFeatureGenerator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.featuregen;
import java.util.List;
import opennlp.tools.util.StringUtil;
public class WordClusterFeatureGenerator implements AdaptiveFeatureGenerator {

  // All configuration is fixed at construction time, so the fields are final.
  private final WordClusterDictionary tokenDictionary;
  private final String resourceName;
  private final boolean lowerCaseDictionary;

  /**
   * Creates a feature generator that emits the cluster id of the current
   * token as a feature.
   *
   * @param dict the dictionary mapping tokens to cluster ids
   * @param dictResourceKey prefix used when emitting the feature string
   * @param lowerCaseDictionary if {@code true}, tokens are lower cased before
   *        the dictionary lookup
   */
  public WordClusterFeatureGenerator(WordClusterDictionary dict,
      String dictResourceKey, boolean lowerCaseDictionary) {
    tokenDictionary = dict;
    resourceName = dictResourceKey;
    this.lowerCaseDictionary = lowerCaseDictionary;
  }

  /**
   * Adds a feature of the form {@code resourceName + clusterId} for the token
   * at {@code index}, if the token (optionally lower cased) is present in the
   * dictionary. Tokens without a cluster id produce no feature.
   */
  public void createFeatures(List<String> features, String[] tokens, int index,
      String[] previousOutcomes) {
    final String clusterId;
    if (lowerCaseDictionary) {
      clusterId = tokenDictionary.lookupToken(StringUtil.toLowerCase(tokens[index]));
    } else {
      clusterId = tokenDictionary.lookupToken(tokens[index]);
    }
    if (clusterId != null) {
      features.add(resourceName + clusterId);
    }
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/featuregen/package-info.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* This package contains classes for generating sequence features.
*/
package opennlp.tools.util.featuregen;
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/java/Experimental.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.java;
import java.lang.annotation.Documented;
/**
 * Indicates that the annotated API element is experimental: it is not stable
 * and may change or be removed in a future release without prior notice.
 * Clients should not depend on its long-term availability.
 */
@Documented
public @interface Experimental {
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/model/ArtifactProvider.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.model;
/**
* Provides access to model persisted artifacts.
*/
public interface ArtifactProvider {

  /**
   * Gets an artifact by name.
   *
   * @param key the name under which the artifact is stored
   * @return the artifact, or {@code null} if no artifact is stored under the key
   */
  <T> T getArtifact(String key);

  /**
   * Retrieves the value for the given key from the manifest.properties entry.
   *
   * @param key the manifest property name
   * @return the value, or {@code null} if the property is not set
   */
  String getManifestProperty(String key);

  /**
   * Retrieves the language code of the material which was used to train the
   * model, or {@code x-unspecified} if none was set.
   *
   * @return the language code of this model
   */
  String getLanguage();

  /**
   * Indicates if this provider was loaded from a serialized form. It is
   * useful, for example, while validating artifacts: you can skip the time
   * consuming ones if they were already validated during serialization.
   *
   * @return {@code true} if this model was loaded from a serialized form
   */
  boolean isLoadedFromSerialized();
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/model/ArtifactSerializer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.model;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
/**
* Responsible to create an artifact from an {@link InputStream}.
*/
public interface ArtifactSerializer<T> {

  /**
   * Creates the artifact from the provided {@link InputStream}.
   *
   * The {@link InputStream} remains open.
   *
   * @param in the stream to read the artifact from
   * @return the artifact
   *
   * @throws IOException if reading from the stream fails or the data is invalid
   */
  T create(InputStream in) throws IOException;

  /**
   * Serializes the artifact to the provided {@link OutputStream}.
   *
   * The {@link OutputStream} remains open.
   *
   * @param artifact the artifact to write
   * @param out the stream to write the artifact to
   * @throws IOException if writing to the stream fails
   */
  void serialize(T artifact, OutputStream out) throws IOException;
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/model/BaseModel.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.model;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.OutputStream;
import java.io.Serializable;
import java.net.URL;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Properties;
import java.util.UUID;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;
import ai.idylnlp.opennlp.custom.EncryptedDataOutputStream;
import opennlp.tools.util.BaseToolFactory;
import opennlp.tools.util.InvalidFormatException;
import opennlp.tools.util.Version;
import opennlp.tools.util.ext.ExtensionLoader;
/**
* This model is a common based which can be used by the components
* model classes.
*
* TODO:
* Provide sub classes access to serializers already in constructor
*/
/**
 * This model is a common base which can be used by the component
 * model classes.
 *
 * TODO:
 * Provide sub classes access to serializers already in constructor
 */
public abstract class BaseModel implements ArtifactProvider, Serializable {

  protected static final String MANIFEST_ENTRY = "manifest.properties";
  protected static final String FACTORY_NAME = "factory";

  private static final String MANIFEST_VERSION_PROPERTY = "Manifest-Version";
  private static final String COMPONENT_NAME_PROPERTY = "Component-Name";
  private static final String VERSION_PROPERTY = "OpenNLP-Version";
  private static final String TIMESTAMP_PROPERTY = "Timestamp";
  private static final String LANGUAGE_PROPERTY = "Language";

  public static final String TRAINING_CUTOFF_PROPERTY = "Training-Cutoff";
  public static final String TRAINING_ITERATIONS_PROPERTY = "Training-Iterations";
  public static final String TRAINING_EVENTHASH_PROPERTY = "Training-Eventhash";

  // Constant prefix for manifest keys naming a custom serializer per entry;
  // declared final since it must never be reassigned.
  private static final String SERIALIZER_CLASS_NAME_PREFIX = "serializer-class-";

  // Maps artifact file name extensions to their serializers. Rebuilt on
  // deserialization in readObject.
  private Map<String, ArtifactSerializer> artifactSerializers = new HashMap<>();

  // Holds all model artifacts keyed by entry name (includes the manifest).
  protected Map<String, Object> artifactMap = new HashMap<>();

  protected BaseToolFactory toolFactory;

  private String componentName;

  private boolean subclassSerializersInitiated = false;
  private boolean finishedLoadingArtifacts = false;

  private boolean isLoadedFromSerialized;

  private BaseModel(String componentName, boolean isLoadedFromSerialized) {
    this.isLoadedFromSerialized = isLoadedFromSerialized;
    this.componentName = Objects.requireNonNull(componentName, "componentName must not be null!");
  }

  /**
   * Initializes the current instance. The sub-class constructor should call the
   * method {@link #checkArtifactMap()} to check the artifact map is OK.
   * <p>
   * Sub-classes will have access to custom artifacts and serializers provided
   * by the factory.
   *
   * @param componentName
   *          the component name
   * @param languageCode
   *          the language code
   * @param manifestInfoEntries
   *          additional information in the manifest
   * @param factory
   *          the factory
   */
  protected BaseModel(String componentName, String languageCode,
      Map<String, String> manifestInfoEntries, BaseToolFactory factory) {
    this(componentName, false);

    Objects.requireNonNull(languageCode, "languageCode must not be null");

    createBaseArtifactSerializers(artifactSerializers);

    Properties manifest = new Properties();
    manifest.setProperty(MANIFEST_VERSION_PROPERTY, "1.0");
    manifest.setProperty(LANGUAGE_PROPERTY, languageCode);
    manifest.setProperty(VERSION_PROPERTY, Version.currentVersion().toString());
    manifest.setProperty(TIMESTAMP_PROPERTY, Long.toString(System.currentTimeMillis()));
    manifest.setProperty(COMPONENT_NAME_PROPERTY, componentName);

    if (manifestInfoEntries != null) {
      for (Map.Entry<String, String> entry : manifestInfoEntries.entrySet()) {
        manifest.setProperty(entry.getKey(), entry.getValue());
      }
    }

    artifactMap.put(MANIFEST_ENTRY, manifest);
    finishedLoadingArtifacts = true;

    if (factory != null) {
      setManifestProperty(FACTORY_NAME, factory.getClass().getCanonicalName());
      artifactMap.putAll(factory.createArtifactMap());

      // new manifest entries
      Map<String, String> entries = factory.createManifestEntries();
      for (Entry<String, String> entry : entries.entrySet()) {
        setManifestProperty(entry.getKey(), entry.getValue());
      }
    }

    try {
      initializeFactory();
    } catch (InvalidFormatException e) {
      throw new IllegalArgumentException("Could not initialize tool factory. ", e);
    }
    loadArtifactSerializers();
  }

  /**
   * Initializes the current instance. The sub-class constructor should call the
   * method {@link #checkArtifactMap()} to check the artifact map is OK.
   *
   * @param componentName
   *          the component name
   * @param languageCode
   *          the language code
   * @param manifestInfoEntries
   *          additional information in the manifest
   */
  protected BaseModel(String componentName, String languageCode, Map<String, String> manifestInfoEntries) {
    this(componentName, languageCode, manifestInfoEntries, null);
  }

  /**
   * Initializes the current instance.
   *
   * @param componentName the component name
   * @param in the input stream containing the model
   *
   * @throws IOException if reading or parsing the model fails
   */
  protected BaseModel(String componentName, InputStream in) throws IOException {
    this(componentName, true);
    loadModel(in);
  }

  protected BaseModel(String componentName, File modelFile) throws IOException {
    this(componentName, true);

    try (InputStream in = new BufferedInputStream(new FileInputStream(modelFile))) {
      loadModel(in);
    }
  }

  protected BaseModel(String componentName, URL modelURL) throws IOException {
    this(componentName, true);

    try (InputStream in = new BufferedInputStream(modelURL.openStream())) {
      loadModel(in);
    }
  }

  protected BaseModel() {
    // Used when the model is not an OpenNLP model.
  }

  /**
   * Loads the model from the given stream. The stream must support mark/reset
   * (it is wrapped in a BufferedInputStream otherwise) because the zip package
   * is read twice: once to find the manifest and once to load all artifacts.
   */
  private void loadModel(InputStream in) throws IOException {
    Objects.requireNonNull(in, "in must not be null");

    createBaseArtifactSerializers(artifactSerializers);

    if (!in.markSupported()) {
      in = new BufferedInputStream(in);
    }

    // TODO: Discuss this solution, the buffering should
    final int MODEL_BUFFER_SIZE_LIMIT = Integer.MAX_VALUE;
    in.mark(MODEL_BUFFER_SIZE_LIMIT);

    final ZipInputStream zip = new ZipInputStream(in);

    // The model package can contain artifacts which are serialized with 3rd party
    // serializers which are configured in the manifest file. To be able to load
    // the model the manifest must be read first, and afterwards all the artifacts
    // can be de-serialized.

    // The ordering of artifacts in a zip package is not guaranteed. The stream is first
    // read until the manifest appears, reset, and read again to load all artifacts.
    boolean isSearchingForManifest = true;

    ZipEntry entry;
    while ((entry = zip.getNextEntry()) != null && isSearchingForManifest) {

      // Use the MANIFEST_ENTRY constant instead of a duplicated string literal.
      if (MANIFEST_ENTRY.equals(entry.getName())) {
        // TODO: Probably better to use the serializer here directly!
        ArtifactSerializer factory = artifactSerializers.get("properties");
        artifactMap.put(entry.getName(), factory.create(zip));
        isSearchingForManifest = false;
      }
      zip.closeEntry();
    }

    initializeFactory();

    loadArtifactSerializers();

    // The Input Stream should always be reset-able because if markSupport returns
    // false it is wrapped before hand into an Buffered InputStream
    in.reset();
    finishLoadingArtifacts(in);
    checkArtifactMap();
  }

  private void initializeFactory() throws InvalidFormatException {
    String factoryName = getManifestProperty(FACTORY_NAME);
    if (factoryName == null) {
      // load the default factory
      Class<? extends BaseToolFactory> factoryClass = getDefaultFactory();
      if (factoryClass != null) {
        this.toolFactory = BaseToolFactory.create(factoryClass, this);
      }
    } else {
      try {
        this.toolFactory = BaseToolFactory.create(factoryName, this);
      } catch (InvalidFormatException e) {
        throw new IllegalArgumentException(e);
      }
    }
  }

  /**
   * Sub-classes should override this method if their module has a default
   * BaseToolFactory sub-class.
   *
   * @return the default {@link BaseToolFactory} for the module, or null if none.
   */
  protected Class<? extends BaseToolFactory> getDefaultFactory() {
    return null;
  }

  /**
   * Loads the artifact serializers provided by the sub-class (at most once).
   */
  private void loadArtifactSerializers() {
    if (!subclassSerializersInitiated)
      createArtifactSerializers(artifactSerializers);

    subclassSerializersInitiated = true;
  }

  /**
   * Finish loading the artifacts now that it knows all serializers.
   */
  private void finishLoadingArtifacts(InputStream in)
      throws IOException {

    final ZipInputStream zip = new ZipInputStream(in);

    Map<String, Object> artifactMap = new HashMap<>();

    ZipEntry entry;
    while ((entry = zip.getNextEntry()) != null ) {

      // Note: The manifest.properties file will be read here again,
      // there should be no need to prevent that.

      String entryName = entry.getName();
      String extension = getEntryExtension(entryName);

      ArtifactSerializer factory = artifactSerializers.get(extension);

      String artifactSerializerClazzName =
          getManifestProperty(SERIALIZER_CLASS_NAME_PREFIX + entryName);

      // A per-entry serializer declared in the manifest overrides the
      // extension-based lookup.
      if (artifactSerializerClazzName != null) {
        factory = ExtensionLoader.instantiateExtension(ArtifactSerializer.class, artifactSerializerClazzName);
      }

      if (factory != null) {
        artifactMap.put(entryName, factory.create(zip));
      } else {
        throw new InvalidFormatException("Unknown artifact format: " + extension);
      }

      zip.closeEntry();
    }

    this.artifactMap.putAll(artifactMap);

    finishedLoadingArtifacts = true;
  }

  /**
   * Extracts the "." extension from an entry name.
   *
   * @param entry the entry name which contains the extension
   *
   * @return the extension
   *
   * @throws InvalidFormatException if no extension can be extracted
   */
  private String getEntryExtension(String entry) throws InvalidFormatException {
    int extensionIndex = entry.lastIndexOf('.') + 1;

    // Bug fix: lastIndexOf returns -1 when no '.' is present, so after the +1
    // the "missing extension" case is 0, never -1. The old "== -1" check was
    // unreachable and extension-less entry names slipped through.
    if (extensionIndex == 0 || extensionIndex >= entry.length())
      throw new InvalidFormatException("Entry name must have type extension: " + entry);

    return entry.substring(extensionIndex);
  }

  protected ArtifactSerializer getArtifactSerializer(String resourceName) {
    try {
      return artifactSerializers.get(getEntryExtension(resourceName));
    } catch (InvalidFormatException e) {
      throw new IllegalStateException(e);
    }
  }

  protected static Map<String, ArtifactSerializer> createArtifactSerializers() {
    Map<String, ArtifactSerializer> serializers = new HashMap<>();

    GenericModelSerializer.register(serializers);
    PropertiesSerializer.register(serializers);
    DictionarySerializer.register(serializers);

    serializers.put("txt", new ByteArraySerializer());
    serializers.put("html", new ByteArraySerializer());

    return serializers;
  }

  /**
   * Registers all {@link ArtifactSerializer} for their artifact file name extensions.
   * The registered {@link ArtifactSerializer} are used to create and serialize
   * resources in the model package.
   *
   * Override this method to register custom {@link ArtifactSerializer}s.
   *
   * Note:
   * Subclasses should generally invoke super.createArtifactSerializers at the beginning
   * of this method.
   *
   * This method is called during construction.
   *
   * @param serializers the key of the map is the file extension used to lookup
   *     the {@link ArtifactSerializer}.
   */
  protected void createArtifactSerializers(
      Map<String, ArtifactSerializer> serializers) {
    if (this.toolFactory != null)
      serializers.putAll(this.toolFactory.createArtifactSerializersMap());
  }

  private void createBaseArtifactSerializers(
      Map<String, ArtifactSerializer> serializers) {
    serializers.putAll(createArtifactSerializers());
  }

  /**
   * Validates the parsed artifacts. If something is not
   * valid subclasses should throw an {@link InvalidFormatException}.
   *
   * Note:
   * Subclasses should generally invoke super.validateArtifactMap at the beginning
   * of this method.
   *
   * @throws InvalidFormatException if the artifact map is invalid
   */
  protected void validateArtifactMap() throws InvalidFormatException {
    if (!(artifactMap.get(MANIFEST_ENTRY) instanceof Properties))
      throw new InvalidFormatException("Missing the " + MANIFEST_ENTRY + "!");

    // First check version, everything else might change in the future
    String versionString = getManifestProperty(VERSION_PROPERTY);

    if (versionString != null) {
      Version version;

      try {
        version = Version.parse(versionString);
      }
      catch (NumberFormatException e) {
        throw new InvalidFormatException("Unable to parse model version '" + versionString + "'!", e);
      }

      // Version check is only performed if current version is not the dev/debug version
      if (!Version.currentVersion().equals(Version.DEV_VERSION)) {
        // Major and minor version must match, revision might be
        // this check allows for the use of models of n minor release behind current minor release
        if (Version.currentVersion().getMajor() != version.getMajor() ||
            Version.currentVersion().getMinor() - 3 > version.getMinor()) {
          throw new InvalidFormatException("Model version " + version + " is not supported by this ("
              + Version.currentVersion() + ") version of OpenNLP!");
        }

        // Reject loading a snapshot model with a non-snapshot version
        if (!Version.currentVersion().isSnapshot() && version.isSnapshot()) {
          throw new InvalidFormatException("Model version " + version
              + " is a snapshot - snapshot models are not supported by this non-snapshot version ("
              + Version.currentVersion() + ") of OpenNLP!");
        }
      }
    }
    else {
      throw new InvalidFormatException("Missing " + VERSION_PROPERTY + " property in " +
          MANIFEST_ENTRY + "!");
    }

    if (getManifestProperty(COMPONENT_NAME_PROPERTY) == null)
      throw new InvalidFormatException("Missing " + COMPONENT_NAME_PROPERTY + " property in " +
          MANIFEST_ENTRY + "!");

    if (!getManifestProperty(COMPONENT_NAME_PROPERTY).equals(componentName))
      throw new InvalidFormatException("The " + componentName + " cannot load a model for the " +
          getManifestProperty(COMPONENT_NAME_PROPERTY) + "!");

    if (getManifestProperty(LANGUAGE_PROPERTY) == null)
      throw new InvalidFormatException("Missing " + LANGUAGE_PROPERTY + " property in " +
          MANIFEST_ENTRY + "!");

    // Validate the factory. We try to load it using the ExtensionLoader. It
    // will return the factory, null or raise an exception
    String factoryName = getManifestProperty(FACTORY_NAME);
    if (factoryName != null) {
      try {
        if (ExtensionLoader.instantiateExtension(BaseToolFactory.class,
            factoryName) == null) {
          throw new InvalidFormatException(
              "Could not load an user extension specified by the model: "
                  + factoryName);
        }
      } catch (Exception e) {
        throw new InvalidFormatException(
            "Could not load an user extension specified by the model: "
                + factoryName, e);
      }
    }

    // validate artifacts declared by the factory
    if (toolFactory != null) {
      toolFactory.validateArtifactMap();
    }
  }

  /**
   * Checks the artifact map.
   * <p>
   * A subclass should call this method from a constructor which accepts the individual
   * artifact map items, to validate that these items form a valid model.
   * <p>
   * If the artifacts are not valid an IllegalArgumentException will be thrown.
   */
  protected void checkArtifactMap() {
    if (!finishedLoadingArtifacts)
      throw new IllegalStateException(
          "The method BaseModel.finishLoadingArtifacts(..) was not called by BaseModel sub-class.");

    try {
      validateArtifactMap();
    } catch (InvalidFormatException e) {
      throw new IllegalArgumentException(e);
    }
  }

  /**
   * Retrieves the value to the given key from the manifest.properties
   * entry.
   *
   * @param key the manifest property name
   *
   * @return the value, or {@code null} if the property is not set
   */
  public final String getManifestProperty(String key) {
    Properties manifest = (Properties) artifactMap.get(MANIFEST_ENTRY);

    return manifest.getProperty(key);
  }

  /**
   * Sets a given value for a given key to the manifest.properties entry.
   *
   * @param key the manifest property name
   * @param value the value to set
   */
  protected final void setManifestProperty(String key, String value) {
    Properties manifest = (Properties) artifactMap.get(MANIFEST_ENTRY);

    manifest.setProperty(key, value);
  }

  /**
   * Retrieves the model ID which is generated when the model is serialized.
   *
   * @return the model ID, or {@code null} if the model was never serialized
   */
  public String getModelId() {
    return getManifestProperty("model.id");
  }

  /**
   * Retrieves the language code of the material which
   * was used to train the model or x-unspecified if
   * none was set.
   *
   * @return the language code of this model
   */
  public final String getLanguage() {
    return getManifestProperty(LANGUAGE_PROPERTY);
  }

  /**
   * Retrieves the OpenNLP version which was used
   * to create the model.
   *
   * @return the version
   */
  public final Version getVersion() {
    String version = getManifestProperty(VERSION_PROPERTY);

    return Version.parse(version);
  }

  /**
   * Serializes the model to the given {@link OutputStream}.
   *
   * @param out stream to write the model to
   * @return the generated model ID written to the manifest as {@code model.id}
   * @throws IOException if writing to the stream fails
   */
  @SuppressWarnings("unchecked")
  public final String serialize(OutputStream out) throws IOException {

    if (!subclassSerializersInitiated) {
      throw new IllegalStateException(
          "The method BaseModel.loadArtifactSerializers() was not called by BaseModel subclass constructor.");
    }

    // The model ID is generated here in order to reduce the code changes
    // necessary. If the modelId is generated external to this function then
    // anywhere this function is called must be changed. I just want to
    // minimize the number of code changes to upgrades to newer OpenNLP
    // versions are simplified.
    final String modelId = UUID.randomUUID().toString();

    // Write the model ID to the model's properties.
    setManifestProperty("model.id", modelId);

    // First pass: record custom serializer class names in the manifest so
    // they are available when the model is loaded again.
    for (Entry<String, Object> entry : artifactMap.entrySet()) {
      final String name = entry.getKey();
      final Object artifact = entry.getValue();

      if (artifact instanceof SerializableArtifact) {
        SerializableArtifact serializableArtifact = (SerializableArtifact) artifact;

        String artifactSerializerName = serializableArtifact
            .getArtifactSerializerClass().getName();

        setManifestProperty(SERIALIZER_CLASS_NAME_PREFIX + name,
            artifactSerializerName);
      }
    }

    // Second pass: write each artifact as a zip entry.
    ZipOutputStream zip = new ZipOutputStream(out);

    for (Entry<String, Object> entry : artifactMap.entrySet()) {
      String name = entry.getKey();
      zip.putNextEntry(new ZipEntry(name));

      Object artifact = entry.getValue();

      ArtifactSerializer serializer = getArtifactSerializer(name);

      // If model is serialize-able always use the provided serializer
      if (artifact instanceof SerializableArtifact) {

        SerializableArtifact serializableArtifact = (SerializableArtifact) artifact;

        String artifactSerializerName =
            serializableArtifact.getArtifactSerializerClass().getName();

        serializer = ExtensionLoader.instantiateExtension(ArtifactSerializer.class, artifactSerializerName);
      }

      if (serializer == null) {
        throw new IllegalStateException("Missing serializer for " + name);
      }

      serializer.serialize(artifactMap.get(name), zip);

      zip.closeEntry();
    }

    zip.finish();
    zip.flush();

    return modelId;
  }

  public final void serialize(File model) throws IOException {
    try (OutputStream out = new BufferedOutputStream(new FileOutputStream(model))) {
      serialize(out);
    }
  }

  public final void serialize(Path model) throws IOException {
    serialize(model.toFile());
  }

  @SuppressWarnings("unchecked")
  public <T> T getArtifact(String key) {
    Object artifact = artifactMap.get(key);
    if (artifact == null)
      return null;
    return (T) artifact;
  }

  public boolean isLoadedFromSerialized() {
    return isLoadedFromSerialized;
  }

  // These methods are required to serialize/deserialize the model because
  // many of the included objects in this model are not Serializable.
  // An alternative to this solution is to make all included objects
  // Serializable and remove the writeObject and readObject methods.
  // This will allow the usage of final for fields that should not change.

  // NOTE(review): writeObject writes the component name via writeEncryptedUTF
  // on an EncryptedDataOutputStream while readObject reads it with plain
  // readUTF on an ObjectInputStream. This is presumably matched by the custom
  // stream implementations — confirm against EncryptedDataOutputStream.
  private void writeObject(EncryptedDataOutputStream out) throws IOException {
    out.writeEncryptedUTF(componentName);
    this.serialize(out);
  }

  private void readObject(final ObjectInputStream in) throws IOException {
    isLoadedFromSerialized = true;

    artifactSerializers = new HashMap<>();
    artifactMap = new HashMap<>();

    componentName = in.readUTF();
    this.loadModel(in);
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/model/ByteArraySerializer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.model;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
public class ByteArraySerializer implements ArtifactSerializer<byte[]> {
public byte[] create(InputStream in) throws IOException {
return ModelUtil.read(in);
}
public void serialize(byte[] artifact, OutputStream out) throws IOException {
out.write(artifact);
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/model/ChunkerModelSerializer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.model;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import opennlp.tools.chunker.ChunkerModel;
import opennlp.tools.parser.ParserChunkerFactory;
import opennlp.tools.util.Version;
public class ChunkerModelSerializer implements ArtifactSerializer<ChunkerModel> {
public ChunkerModel create(InputStream in) throws IOException {
ChunkerModel model = new ChunkerModel(new UncloseableInputStream(in));
Version version = model.getVersion();
if (version.getMajor() == 1 && version.getMinor() == 5) {
model = new ChunkerModel(model.getLanguage(), model.getChunkerModel(), new ParserChunkerFactory());
}
return model;
}
public void serialize(ChunkerModel artifact, OutputStream out)
throws IOException {
artifact.serialize(out);
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/model/DictionarySerializer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.model;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Map;
import opennlp.tools.dictionary.Dictionary;
public class DictionarySerializer implements ArtifactSerializer<Dictionary> {
public Dictionary create(InputStream in) throws IOException {
return new Dictionary(in);
}
public void serialize(Dictionary dictionary, OutputStream out) throws IOException {
dictionary.serialize(out);
}
static void register(Map<String, ArtifactSerializer> factories) {
factories.put("dictionary", new DictionarySerializer());
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/model/GenericModelSerializer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.model;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Map;
import opennlp.tools.ml.model.AbstractModel;
import opennlp.tools.ml.model.BinaryFileDataReader;
import opennlp.tools.ml.model.GenericModelReader;
public class GenericModelSerializer implements ArtifactSerializer<AbstractModel> {
public AbstractModel create(InputStream in) throws IOException {
return new GenericModelReader(new BinaryFileDataReader(in)).getModel();
}
public void serialize(AbstractModel artifact, OutputStream out) throws IOException {
ModelUtil.writeModel(artifact, out);
}
public static void register(Map<String, ArtifactSerializer> factories) {
factories.put("model", new GenericModelSerializer());
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/model/ModelType.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.model;
/**
 * The type of the underlying machine learning model.
 */
public enum ModelType {
  /** A maximum entropy model. */
  MAXENT,
  /** A perceptron model. */
  PERCEPTRON,
  /** A perceptron model used for sequence tagging. */
  PERCEPTRON_SEQUENCE
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/model/ModelUtil.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.model;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import ai.idylnlp.opennlp.custom.EncryptedDataOutputStream;
import opennlp.tools.ml.maxent.GISTrainer;
import opennlp.tools.ml.model.AbstractModel;
import opennlp.tools.ml.model.GenericModelWriter;
import opennlp.tools.ml.model.MaxentModel;
import opennlp.tools.util.TrainingParameters;
/**
 * Utility class for handling of {@link MaxentModel}s.
 */
public final class ModelUtil {

  private ModelUtil() {
    // utility class, never instantiated
  }

  /**
   * Writes the given model to the given {@link OutputStream}.
   * <p>
   * The provided stream is intentionally left open; closing it is the
   * caller's responsibility.
   *
   * @param model the model to be written
   * @param out the stream the model should be written to
   *
   * @throws IOException if writing fails
   * @throws IllegalArgumentException in case one of the parameters is null
   */
  public static void writeModel(MaxentModel model, final OutputStream out)
      throws IOException, IllegalArgumentException {
    Objects.requireNonNull(model, "model parameter must not be null");
    Objects.requireNonNull(out, "out parameter must not be null");

    // Wrap "out" so the writer only ever forwards single-byte writes; the
    // wrapper's default close()/flush() do nothing to the caller's stream.
    OutputStream delegate = new OutputStream() {
      @Override
      public void write(int b) throws IOException {
        out.write(b);
      }
    };

    GenericModelWriter writer =
        new GenericModelWriter((AbstractModel) model, new EncryptedDataOutputStream(delegate));
    writer.persist();
  }

  /**
   * Checks if the expected outcomes are all contained as outcomes in the given model.
   *
   * @param model the model whose outcomes are inspected
   * @param expectedOutcomes the complete set of outcomes the model should have
   *
   * @return true if all expected outcomes are the only outcomes of the model.
   */
  public static boolean validateOutcomes(MaxentModel model, String... expectedOutcomes) {
    // Counts must match exactly; otherwise the outcome sets cannot be equal.
    if (expectedOutcomes.length != model.getNumOutcomes()) {
      return false;
    }

    Set<String> expected = new HashSet<>(Arrays.asList(expectedOutcomes));
    for (int i = 0; i < model.getNumOutcomes(); i++) {
      if (!expected.contains(model.getOutcome(i))) {
        return false;
      }
    }
    return true;
  }

  /**
   * Writes the provided {@link InputStream} into a byte array
   * which is returned.
   *
   * @param in stream to read data for the byte array from
   * @return byte array with the contents of the stream
   *
   * @throws IOException if an exception is thrown while reading
   *     from the provided {@link InputStream}
   */
  public static byte[] read(InputStream in) throws IOException {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    byte[] buffer = new byte[1024];
    for (int count = in.read(buffer); count > 0; count = in.read(buffer)) {
      bytes.write(buffer, 0, count);
    }
    bytes.close();
    return bytes.toByteArray();
  }

  /**
   * Stores the training cutoff and iteration count in the manifest entries.
   *
   * @param manifestInfoEntries the manifest map to update
   * @param cutoff the feature cutoff used for training
   * @param iterations the number of training iterations
   */
  public static void addCutoffAndIterations(Map<String, String> manifestInfoEntries,
      int cutoff, int iterations) {
    manifestInfoEntries.put(BaseModel.TRAINING_CUTOFF_PROPERTY, Integer.toString(cutoff));
    manifestInfoEntries.put(BaseModel.TRAINING_ITERATIONS_PROPERTY,
        Integer.toString(iterations));
  }

  /**
   * Creates the default training parameters in case they are not provided.
   *
   * Note: Do not use this method, internal use only!
   *
   * @return training parameters instance with MAXENT, 100 iterations, cutoff 5
   */
  public static TrainingParameters createDefaultTrainingParameters() {
    TrainingParameters params = new TrainingParameters();
    params.put(TrainingParameters.ALGORITHM_PARAM, GISTrainer.MAXENT_VALUE);
    params.put(TrainingParameters.ITERATIONS_PARAM, 100);
    params.put(TrainingParameters.CUTOFF_PARAM, 5);
    return params;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/model/POSModelSerializer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.model;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.Map;
import opennlp.tools.ml.BeamSearch;
import opennlp.tools.postag.POSModel;
import opennlp.tools.util.Version;
public class POSModelSerializer implements ArtifactSerializer<POSModel> {
public POSModel create(InputStream in) throws IOException {
POSModel posModel = new POSModel(new UncloseableInputStream(in));
// The 1.6.x models write the non-default beam size into the model itself.
// In 1.5.x the parser configured the beam size when the model was loaded,
// this is not possible anymore with the new APIs
Version version = posModel.getVersion();
if (version.getMajor() == 1 && version.getMinor() == 5) {
if (posModel.getManifestProperty(BeamSearch.BEAM_SIZE_PARAMETER) == null) {
Map<String, String> manifestInfoEntries = new HashMap<>();
// The version in the model must be correct or otherwise version
// dependent code branches in other places fail
manifestInfoEntries.put("OpenNLP-Version", "1.5.0");
posModel = new POSModel(posModel.getLanguage(), posModel.getPosModel(), 10,
manifestInfoEntries, posModel.getFactory());
}
}
return posModel;
}
public void serialize(POSModel artifact, OutputStream out)
throws IOException {
artifact.serialize(out);
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/model/PropertiesSerializer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.model;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Map;
import java.util.Properties;
class PropertiesSerializer implements ArtifactSerializer<Properties> {
public Properties create(InputStream in) throws IOException {
Properties properties = new Properties();
properties.load(in);
return properties;
}
public void serialize(Properties properties, OutputStream out) throws IOException {
properties.store(out, "");
}
static void register(Map<String, ArtifactSerializer> factories) {
factories.put("properties", new PropertiesSerializer());
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/model/SerializableArtifact.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.model;
/**
 * Marker interface for artifacts that know which {@link ArtifactSerializer}
 * implementation can persist and recreate them.
 */
public interface SerializableArtifact {
  /**
   * Retrieves the class which can serialize and recreate this artifact.
   * <br>
   * Note:
   * The serializer class must have a public zero argument constructor or
   * an exception is thrown during model serialization/loading.
   *
   * @return the corresponding ArtifactSerializer class.
   */
  Class<?> getArtifactSerializerClass();
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/model/UncloseableInputStream.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.model;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
/**
* An {@link InputStream} which cannot be closed.
*/
public class UncloseableInputStream extends FilterInputStream {
public UncloseableInputStream(InputStream in) {
super(in);
}
/**
* This method does not has any effect the {@link InputStream}
* cannot be closed.
*/
@Override
public void close() throws IOException {
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/normalizer/AggregateCharSequenceNormalizer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.normalizer;
/**
 * A {@link CharSequenceNormalizer} that applies a fixed sequence of
 * normalizers in order, feeding each one the output of the previous.
 */
public class AggregateCharSequenceNormalizer implements CharSequenceNormalizer {

  private final CharSequenceNormalizer[] normalizers;

  /**
   * @param normalizers the normalizers to apply, in application order
   */
  public AggregateCharSequenceNormalizer(CharSequenceNormalizer ... normalizers) {
    this.normalizers = normalizers;
  }

  /**
   * Applies each configured normalizer in turn and returns the final result.
   *
   * @param text the text to normalize
   * @return the text after all normalizers have been applied
   */
  public CharSequence normalize(CharSequence text) {
    // Loop variable renamed: the original named it "normalizers", shadowing
    // the field of the same name inside the loop body.
    for (CharSequenceNormalizer normalizer : normalizers) {
      text = normalizer.normalize(text);
    }
    return text;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/normalizer/CharSequenceNormalizer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.normalizer;
/**
 * Transforms a {@link CharSequence} into a normalized form, for example by
 * removing or replacing unwanted character sequences.
 */
public interface CharSequenceNormalizer {
  /**
   * Normalizes the given text.
   *
   * @param text the text to normalize
   * @return the normalized text
   */
  CharSequence normalize(CharSequence text);
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/normalizer/EmojiCharSequenceNormalizer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.normalizer;
import java.util.regex.Pattern;
/**
 * A {@link CharSequenceNormalizer} that replaces runs of characters matched
 * by the emoji pattern with a single space.
 */
public class EmojiCharSequenceNormalizer implements CharSequenceNormalizer {

  // NOTE(review): this character class matches surrogate code units, which is
  // broader than just emoji — kept byte-identical to preserve behavior.
  private static final Pattern EMOJI_REGEX =
      Pattern.compile("[\\uD83C-\\uDBFF\\uDC00-\\uDFFF]+");

  private static final EmojiCharSequenceNormalizer INSTANCE = new EmojiCharSequenceNormalizer();

  /** @return the shared singleton instance */
  public static EmojiCharSequenceNormalizer getInstance() {
    return INSTANCE;
  }

  /**
   * Replaces each matched run with a single space.
   *
   * @param text the text to normalize
   * @return the normalized text
   */
  public CharSequence normalize(CharSequence text) {
    return EMOJI_REGEX.matcher(text).replaceAll(" ");
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/normalizer/NumberCharSequenceNormalizer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.normalizer;
import java.util.regex.Pattern;
/**
 * A {@link CharSequenceNormalizer} that replaces each run of digits with a
 * single space.
 */
public class NumberCharSequenceNormalizer implements CharSequenceNormalizer {

  private static final NumberCharSequenceNormalizer INSTANCE = new NumberCharSequenceNormalizer();

  private static final Pattern NUMBER_REGEX = Pattern.compile("\\d+");

  /** @return the shared singleton instance */
  public static NumberCharSequenceNormalizer getInstance() {
    return INSTANCE;
  }

  /**
   * Replaces every digit run in {@code text} with a single space.
   *
   * @param text the text to normalize
   * @return the normalized text
   */
  public CharSequence normalize(CharSequence text) {
    return NUMBER_REGEX.matcher(text).replaceAll(" ");
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/normalizer/ShrinkCharSequenceNormalizer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.normalizer;
import java.util.regex.Pattern;
/**
 * A {@link CharSequenceNormalizer} that collapses repeated whitespace and
 * repeated characters, then trims the result.
 */
public class ShrinkCharSequenceNormalizer implements CharSequenceNormalizer {

  private static final Pattern REPEATED_CHAR_REGEX = Pattern.compile("(.)\\1{2,}",
      Pattern.CASE_INSENSITIVE);

  private static final Pattern SPACE_REGEX = Pattern.compile("\\s{2,}",
      Pattern.CASE_INSENSITIVE);

  private static final ShrinkCharSequenceNormalizer INSTANCE = new ShrinkCharSequenceNormalizer();

  /** @return the shared singleton instance */
  public static ShrinkCharSequenceNormalizer getInstance() {
    return INSTANCE;
  }

  /**
   * Collapses runs of whitespace to a single space, shrinks any character
   * repeated three or more times to exactly two occurrences, and trims.
   *
   * @param text the text to normalize
   * @return the normalized text
   */
  public CharSequence normalize(CharSequence text) {
    String collapsed = SPACE_REGEX.matcher(text).replaceAll(" ");
    String shrunk = REPEATED_CHAR_REGEX.matcher(collapsed).replaceAll("$1$1");
    return shrunk.trim();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/normalizer/TwitterCharSequenceNormalizer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.normalizer;
import java.util.regex.Pattern;
/**
 * A {@link CharSequenceNormalizer} that strips Twitter-specific tokens:
 * hashtags/mentions, retweet markers, emoticon faces, and laughter runs.
 */
public class TwitterCharSequenceNormalizer implements CharSequenceNormalizer {

  private static final TwitterCharSequenceNormalizer INSTANCE = new TwitterCharSequenceNormalizer();

  // Hashtags (#...) and user mentions (@...).
  private static final Pattern HASH_USER_REGEX =
      Pattern.compile("[#@]\\S+");

  // Retweet markers such as "RT " or "rt:".
  private static final Pattern RT_REGEX =
      Pattern.compile("\\b(rt[ :])+", Pattern.CASE_INSENSITIVE);

  // Emoticon faces, e.g. ":-)" or "xD".
  private static final Pattern FACE_REGEX =
      Pattern.compile("[:;x]-?[()dop]", Pattern.CASE_INSENSITIVE);

  // Laughter runs such as "hahaha" or "jeje".
  private static final Pattern LAUGH_REGEX =
      Pattern.compile("([hj])+([aieou])+(\\1+\\2+)+", Pattern.CASE_INSENSITIVE);

  /** @return the shared singleton instance */
  public static TwitterCharSequenceNormalizer getInstance() {
    return INSTANCE;
  }

  /**
   * Applies the four replacements in order: hashtags/mentions, RT markers,
   * and faces become spaces; laughter runs are shortened to two repetitions.
   *
   * @param text the text to normalize
   * @return the normalized text
   */
  public CharSequence normalize (CharSequence text) {
    CharSequence result = HASH_USER_REGEX.matcher(text).replaceAll(" ");
    result = RT_REGEX.matcher(result).replaceAll(" ");
    result = FACE_REGEX.matcher(result).replaceAll(" ");
    return LAUGH_REGEX.matcher(result).replaceAll("$1$2$1$2");
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/normalizer/UrlCharSequenceNormalizer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.normalizer;
import java.util.regex.Pattern;
/**
 * A {@link CharSequenceNormalizer} that replaces URLs and e-mail addresses
 * with a single space.
 */
public class UrlCharSequenceNormalizer implements CharSequenceNormalizer {

  private static final UrlCharSequenceNormalizer INSTANCE = new UrlCharSequenceNormalizer();

  private static final Pattern URL_REGEX =
      Pattern.compile("https?://[-_.?&~;+=/#0-9A-Za-z]+");

  private static final Pattern MAIL_REGEX =
      Pattern.compile("[-_.0-9A-Za-z]+@[-_0-9A-Za-z]+[-_.0-9A-Za-z]+");

  /** @return the shared singleton instance */
  public static UrlCharSequenceNormalizer getInstance() {
    return INSTANCE;
  }

  /**
   * Blanks out URLs first, then e-mail addresses.
   *
   * @param text the text to normalize
   * @return the normalized text
   */
  public CharSequence normalize (CharSequence text) {
    String withoutUrls = URL_REGEX.matcher(text).replaceAll(" ");
    return MAIL_REGEX.matcher(withoutUrls).replaceAll(" ");
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/wordvector/DoubleArrayVector.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.wordvector;
import java.nio.DoubleBuffer;
import java.nio.FloatBuffer;
/**
 * A {@link WordVector} backed by a double array.
 */
class DoubleArrayVector implements WordVector {

  private final double[] vector;

  DoubleArrayVector(double[] vector) {
    this.vector = vector;
  }

  @Override
  public WordVectorType getDataType() {
    return WordVectorType.DOUBLE;
  }

  @Override
  public float getAsFloat(int index) {
    // Narrowing conversion from the stored double value.
    return (float) vector[index];
  }

  @Override
  public double getAsDouble(int index) {
    return vector[index];
  }

  @Override
  public float[] toFloatArray() {
    // Fresh narrowed copy of the backing array.
    float[] result = new float[vector.length];
    for (int i = 0; i < vector.length; i++) {
      result[i] = (float) vector[i];
    }
    return result;
  }

  @Override
  public double[] toDoubleArray() {
    // Equivalent to the original toDoubleBuffer().array(): exposes the
    // backing array itself rather than a copy.
    return vector;
  }

  @Override
  public FloatBuffer toFloatBuffer() {
    return FloatBuffer.wrap(toFloatArray()).asReadOnlyBuffer();
  }

  @Override
  public DoubleBuffer toDoubleBuffer() {
    return DoubleBuffer.wrap(vector);
  }

  @Override
  public int dimension() {
    return vector.length;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/wordvector/FloatArrayVector.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.wordvector;
import java.nio.DoubleBuffer;
import java.nio.FloatBuffer;
/**
 * A {@link WordVector} backed by a float array.
 */
class FloatArrayVector implements WordVector {

  private final float[] vector;

  FloatArrayVector(float[] vector) {
    this.vector = vector;
  }

  @Override
  public WordVectorType getDataType() {
    return WordVectorType.FLOAT;
  }

  @Override
  public float getAsFloat(int index) {
    return vector[index];
  }

  @Override
  public double getAsDouble(int index) {
    // Implicit widening float -> double.
    return getAsFloat(index);
  }

  @Override
  public float[] toFloatArray() {
    // BUG FIX: the original returned toFloatBuffer().array(), but
    // toFloatBuffer() yields a read-only buffer whose array() method throws
    // ReadOnlyBufferException. Return a defensive copy instead.
    return vector.clone();
  }

  @Override
  public double[] toDoubleArray() {
    double[] doubleVector = new double[vector.length];
    for (int i = 0; i < doubleVector.length; i++) {
      doubleVector[i] = vector[i];
    }
    return doubleVector;
  }

  @Override
  public FloatBuffer toFloatBuffer() {
    return FloatBuffer.wrap(vector).asReadOnlyBuffer();
  }

  @Override
  public DoubleBuffer toDoubleBuffer() {
    return DoubleBuffer.wrap(toDoubleArray());
  }

  @Override
  public int dimension() {
    return vector.length;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/wordvector/Glove.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.wordvector;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import opennlp.tools.util.java.Experimental;
/**
 * Parser for GloVe word vector files.
 * <p>
 * Warning: Experimental new feature, see OPENNLP-1144 for details, the API might be changed anytime.
 */
@Experimental
public class Glove {

  private Glove() {
    // static utility, no instances
  }

  /**
   * Parses a glove vector plain text file: one token per line followed by its
   * space-separated float components. All lines must have the same dimension.
   * <p>
   * Warning: Experimental new feature, see OPENNLP-1144 for details, the API might be changed anytime.
   *
   * @param in the stream to read the plain text vectors from (not closed here)
   * @return a table mapping each token to its vector
   * @throws IOException if reading fails or the vector dimension varies
   */
  @Experimental
  public static WordVectorTable parse(InputStream in) throws IOException {
    BufferedReader reader = new BufferedReader(
        new InputStreamReader(in, StandardCharsets.UTF_8), 1024 * 1024);

    Map<String, WordVector> vectors = new HashMap<>();
    int dimension = -1;  // fixed by the first line

    for (String line = reader.readLine(); line != null; line = reader.readLine()) {
      String[] fields = line.split(" ");
      int lineDimension = fields.length - 1;

      if (dimension == -1) {
        dimension = lineDimension;
      }
      else if (dimension != lineDimension) {
        throw new IOException("Vector dimension must be constant!");
      }

      float[] values = new float[dimension];
      for (int i = 0; i < values.length; i++) {
        values[i] = Float.parseFloat(fields[i + 1]);
      }

      // fields[0] is the token itself.
      vectors.put(fields[0], new FloatArrayVector(values));
    }

    return new MapWordVectorTable(Collections.unmodifiableMap(vectors));
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/wordvector/MapWordVectorTable.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.wordvector;
import java.util.Map;
/**
 * A {@link WordVectorTable} backed by an in-memory {@link Map}.
 */
class MapWordVectorTable implements WordVectorTable {

  private final Map<String, WordVector> vectors;

  /**
   * Creates a table over the given token-to-vector map.
   *
   * @param vectors The backing map; it is used as-is, not copied.
   */
  MapWordVectorTable(Map<String, WordVector> vectors) {
    this.vectors = vectors;
  }

  @Override
  public WordVector get(CharSequence token) {
    return vectors.get(token.toString());
  }

  @Override
  public int size() {
    return vectors.size();
  }

  @Override
  public int dimension() {
    // An empty table has no defined dimension; report -1 in that case.
    if (vectors.isEmpty()) {
      return -1;
    }
    return vectors.values().iterator().next().dimension();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/wordvector/WordVector.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.wordvector;
import java.nio.DoubleBuffer;
import java.nio.FloatBuffer;
import opennlp.tools.util.java.Experimental;
/**
 * A word vector.
 *
 * <p>
 * Warning: Experimental new feature, see OPENNLP-1144 for details, the API might be changed anytime.
 */
@Experimental
public interface WordVector {

/**
 * Gets the primitive data type backing this vector.
 *
 * @return The {@link WordVectorType} of the stored values.
 */
WordVectorType getDataType();

/**
 * Gets the component at the given index as a float.
 *
 * @param index The component index.
 * @return The value at {@code index}.
 */
float getAsFloat(int index);

/**
 * Gets the component at the given index as a double.
 *
 * @param index The component index.
 * @return The value at {@code index}.
 */
double getAsDouble(int index);

/**
 * Returns the vector values as a float array.
 *
 * @return The vector values.
 */
float[] toFloatArray();

/**
 * Returns the vector values as a double array.
 *
 * @return The vector values.
 */
double[] toDoubleArray();

/**
 * Returns the vector values as a {@link FloatBuffer}.
 *
 * @return A buffer over the vector values.
 */
FloatBuffer toFloatBuffer();

/**
 * Returns the vector values as a {@link DoubleBuffer}.
 *
 * @return A buffer over the vector values.
 */
DoubleBuffer toDoubleBuffer();

/**
 * Returns the number of components in this vector.
 *
 * @return The vector dimension.
 */
int dimension();
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/wordvector/WordVectorTable.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.wordvector;
import opennlp.tools.util.java.Experimental;
/**
 * A table that maps tokens to word vectors.
 *
 * <p>
 * Warning: Experimental new feature, see OPENNLP-1144 for details, the API might be changed anytime.
 */
@Experimental
public interface WordVectorTable {

/**
 * Looks up the vector for the given token.
 *
 * @param token The token to look up.
 * @return The token's {@link WordVector}, or {@code null} if the token is unknown.
 */
WordVector get(CharSequence token);

/**
 * Returns the number of tokens in this table.
 *
 * @return The table size.
 */
int size();

/**
 * Returns the dimension of the vectors in this table.
 *
 * @return The vector dimension.
 */
int dimension();
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/util/wordvector/WordVectorType.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.wordvector;
import opennlp.tools.util.java.Experimental;
/**
 * The primitive data type backing a {@link WordVector}.
 *
 * <p>
 * Warning: Experimental new feature, see OPENNLP-1144 for details, the API might be changed anytime.
 */
@Experimental
public enum WordVectorType {

/** 32-bit floating point values. */
FLOAT,

/** 64-bit floating point values. */
DOUBLE;
}
|
0
|
java-sources/ai/idylnlp/idylnlp-pipeline/1.1.0/ai/idylnlp
|
java-sources/ai/idylnlp/idylnlp-pipeline/1.1.0/ai/idylnlp/pipeline/NerPipeline.java
|
/*******************************************************************************
* Copyright 2018 Mountain Fog, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package ai.idylnlp.pipeline;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import com.neovisionaries.i18n.LanguageCode;
import ai.idylnlp.model.ModelValidator;
import ai.idylnlp.model.entity.Entity;
import ai.idylnlp.model.exceptions.EntityFinderException;
import ai.idylnlp.model.exceptions.ModelLoaderException;
import ai.idylnlp.model.manifest.ModelManifest;
import ai.idylnlp.model.manifest.ModelManifestUtils;
import ai.idylnlp.model.manifest.StandardModelManifest;
import ai.idylnlp.model.nlp.DuplicateEntityStrategy;
import ai.idylnlp.model.nlp.EntityComparator;
import ai.idylnlp.model.nlp.EntityOrder;
import ai.idylnlp.model.nlp.EntitySanitizer;
import ai.idylnlp.model.nlp.SentenceDetector;
import ai.idylnlp.model.nlp.Tokenizer;
import ai.idylnlp.model.nlp.ner.EntityExtractionRequest;
import ai.idylnlp.model.nlp.ner.EntityExtractionResponse;
import ai.idylnlp.model.nlp.ner.EntityRecognizer;
import ai.idylnlp.model.nlp.pipeline.Pipeline;
import ai.idylnlp.model.stats.StatsReporter;
import ai.idylnlp.nlp.recognizer.OpenNLPEntityRecognizer;
import ai.idylnlp.nlp.recognizer.configuration.OpenNLPEntityRecognizerConfiguration;
import ai.idylnlp.nlp.sentence.BreakIteratorSentenceDetector;
import ai.idylnlp.nlp.tokenizers.BreakIteratorTokenizer;
import ai.idylnlp.opennlp.custom.modelloader.LocalModelLoader;
import ai.idylnlp.opennlp.custom.modelloader.ModelLoader;
import ai.idylnlp.opennlp.custom.validators.TrueModelValidator;
import ai.idylnlp.zoo.IdylNLPModelZoo;
import opennlp.tools.namefind.TokenNameFinderModel;
/**
* An NLP pipeline for named-entity recognition (NER). The pipeline performs
* all required operations for extracting entities from natural language text.
* An implementation of {@link Pipeline}.
*
* @author Mountain Fog, Inc.
*
*/
public class NerPipeline implements Pipeline<EntityExtractionResponse> {
private static final Logger LOGGER = LogManager.getLogger(NerPipeline.class);
private SentenceDetector sentenceDetector;
private Tokenizer tokenizer;
private List<EntityRecognizer> entityRecognizers;
private List<EntitySanitizer> entitySanitizers;
private StatsReporter statsReporter;
private DuplicateEntityStrategy duplicateEntityStrategy;
private LanguageCode languageCode;
private EntityOrder entityOrder;
private IdylNLPModelZoo zoo;
private Set<String> entityTypes;
private NerPipeline(
SentenceDetector sentenceDetector,
Tokenizer tokenizer,
List<EntityRecognizer> entityRecognizers,
List<EntitySanitizer> entitySanitizers,
StatsReporter statsReporter,
DuplicateEntityStrategy duplicateEntityStrategy,
LanguageCode languageCode,
EntityOrder entityOrder,
IdylNLPModelZoo zoo,
Set<String> entityTypes) {
this.sentenceDetector = sentenceDetector;
this.tokenizer = tokenizer;
this.entityRecognizers = entityRecognizers;
this.entitySanitizers = entitySanitizers;
this.statsReporter = statsReporter;
this.duplicateEntityStrategy = duplicateEntityStrategy;
this.languageCode = languageCode;
this.entityOrder = entityOrder;
this.zoo = zoo;
this.entityTypes = entityTypes;
}
/**
* Facilitates the construction of an {@link NerPipeline}.
*
* @author Mountain Fog, Inc.
*
*/
public static class NerPipelineBuilder {
private SentenceDetector sentenceDetector;
private Tokenizer tokenizer;
private List<EntityRecognizer> entityRecognizers;
private List<EntitySanitizer> entitySanitizers;
private StatsReporter statsReporter;
private DuplicateEntityStrategy duplicateEntityStrategy = DuplicateEntityStrategy.USE_HIGHEST_CONFIDENCE;
private EntityOrder entityOrder = EntityOrder.CONFIDENCE;
private IdylNLPModelZoo zoo;
private Set<String> entityTypes;
/**
* Sets the {@link SentenceDetector} for the pipeline.
* @param sentenceDetector The {@link SentenceDetector} for the pipeline.
* @return The {@link NerPipeline pipeline} so calls can be chained.
*/
public NerPipelineBuilder withSentenceDetector(SentenceDetector sentenceDetector) {
this.sentenceDetector = sentenceDetector;
return this;
}
/**
* Sets the {@link Tokenizer} for the pipeline.
* @param tokenizer The {@link Tokenizer} for the pipeline.
* @return The {@link NerPipeline pipeline} so calls can be chained.
*/
public NerPipelineBuilder withTokenizer(Tokenizer tokenizer) {
this.tokenizer = tokenizer;
return this;
}
/**
* Sets the entity recognizers for the pipeline.
* @param entityRecognizers The entity {@link EntityRecognizer recognizers}.
* @return The {@link NerPipeline pipeline} so calls can be chained.
*/
public NerPipelineBuilder withEntityRecognizers(List<EntityRecognizer> entityRecognizers) {
this.entityRecognizers = entityRecognizers;
return this;
}
/**
* Sets the entity sanitizers for the pipeline.
* @param entitySanitizers The entity {@link EntitySanitzer sanitzers}.
* @return The {@link NerPipeline pipeline} so calls can be chained.
*/
public NerPipelineBuilder withEntitySanitizers(List<EntitySanitizer> entitySanitizers) {
this.entitySanitizers = entitySanitizers;
return this;
}
/**
* Sets the {@link StatsReporter}.
* @param statsReporter The {@link StatsReporter}.
* @return The {@link NerPipeline pipeline} so calls can be chained.
*/
public NerPipelineBuilder withStatsReporter(StatsReporter statsReporter) {
this.statsReporter = statsReporter;
return this;
}
/**
* Sets the duplicate entity strategy for the pipeline.
* @param duplicateEntityStrategy The duplicate entity {@link DuplicateEntityStrategy strategy}.
* @return The {@link NerPipeline pipeline} so calls can be chained.
*/
public NerPipelineBuilder withDuplicateEntityStrategy(DuplicateEntityStrategy duplicateEntityStrategy) {
this.duplicateEntityStrategy = duplicateEntityStrategy;
return this;
}
/**
* Sets the return order for extracted entities.
* @param entityOrder The return {@link EntityOrder} for extracted entities.
* @return The {@link NerPipeline pipeline} so calls can be chained.
*/
public NerPipelineBuilder withEntityOrder(EntityOrder entityOrder) {
this.entityOrder = entityOrder;
return this;
}
/**
* Sets the model zoo client.
* @param zoo A {@link IdylNLPModelZoo}.
* @return The {@link NerPipeline pipeline} so calls can be chained.
*/
public NerPipelineBuilder withIdylNLPModelZoo(IdylNLPModelZoo zoo) {
this.zoo = zoo;
return this;
}
/**
* Sets the entity types to extract.
* @param entityTypes The entity types to extract.
* @return The entity types to extract.
*/
public NerPipelineBuilder withEntityTypes(Set<String> entityTypes) {
this.entityTypes = entityTypes;
return this;
}
/**
* Builds the pipeline.
* @return A {@link NerPipeline pipeline}.
*/
public NerPipeline build(LanguageCode languageCode) {
if(sentenceDetector == null) {
// Get a default sentence detector for the given language.
sentenceDetector = new BreakIteratorSentenceDetector(languageCode);
}
if(tokenizer == null) {
// Get a default tokenizer for the given language.
tokenizer = new BreakIteratorTokenizer(languageCode);
}
if(entityRecognizers == null) {
// Get a default entity recognizer for the given language.
final File file = new File(NerPipeline.class.getResource("/models/" + languageCode.getAlpha3().toString().toLowerCase() + "/").getFile());
final String modelDirectory = file.getAbsolutePath();
LOGGER.info("Using model directory {}", modelDirectory);
// TODO: Let the validator be passed in.
final ModelValidator modelValidator = new TrueModelValidator();
final ModelLoader<TokenNameFinderModel> modelLoader = new LocalModelLoader<TokenNameFinderModel>(modelValidator, modelDirectory);
final List<ModelManifest> modelManifests = ModelManifestUtils.getModelManifests(modelDirectory);
final Map<String, Map<LanguageCode, Set<StandardModelManifest>>> models = new HashMap<>();
for(ModelManifest modelManifest : modelManifests) {
Set<StandardModelManifest> englishModelManifests = new HashSet<StandardModelManifest>();
englishModelManifests.add((StandardModelManifest) modelManifest);
Map<LanguageCode, Set<StandardModelManifest>> languagesToManifests = new HashMap<>();
languagesToManifests.put(languageCode, englishModelManifests);
models.put(modelManifest.getType(), languagesToManifests);
}
OpenNLPEntityRecognizerConfiguration config = new OpenNLPEntityRecognizerConfiguration.Builder()
.withEntityModelLoader(modelLoader)
.withEntityModels(models)
.build();
OpenNLPEntityRecognizer entityRecognizer = new OpenNLPEntityRecognizer(config);
entityRecognizers = new ArrayList<EntityRecognizer>();
entityRecognizers.add(entityRecognizer);
}
if(entitySanitizers == null) {
entitySanitizers = new ArrayList<EntitySanitizer>();
}
if(entityTypes == null) {
// All entity types.
entityTypes = new HashSet<>();
}
NerPipeline pipeline = new NerPipeline(
sentenceDetector, tokenizer, entityRecognizers, entitySanitizers,
statsReporter, duplicateEntityStrategy, languageCode, entityOrder, zoo, entityTypes);
return pipeline;
}
}
@Override
public EntityExtractionResponse run(String text) {
Set<Entity> entities = new HashSet<Entity>();
boolean successful = true;
long extractionTime = 0;
try {
final String[] sentences = sentenceDetector.sentDetect(text);
for(String sentence : sentences) {
final String[] tokens = tokenizer.tokenize(sentence);
// Extract the entities using all of the NERs in the list.
for(EntityRecognizer entityRecognizer : entityRecognizers) {
LOGGER.debug("Processing tokenized text with entity recognizer {}.", entityRecognizer.toString());
EntityExtractionRequest request = new EntityExtractionRequest(tokens);
request.setDuplicateEntityStrategy(duplicateEntityStrategy);
// TODO: Expose other parameters of the EntityExtractionRequest such as entity confidence.
EntityExtractionResponse response = entityRecognizer.extractEntities(request);
entities.addAll(response.getEntities());
extractionTime += response.getExtractionTime();
}
if(statsReporter != null) {
// Increment the count of entity extraction requests.
statsReporter.increment(StatsReporter.EXTRACTION_REQUESTS, entities.size());
}
// Sanitize the entities.
for(EntitySanitizer sanitizer : entitySanitizers) {
entities = sanitizer.sanitizeEntities(entities);
}
// Handle the duplicate entities per the strategy.
if(duplicateEntityStrategy == DuplicateEntityStrategy.USE_HIGHEST_CONFIDENCE) {
// Remove duplicate entities having a lower confidence.
entities = removeDuplicateEntities(entities);
}
// Sort the entities before returning.
entities = EntityComparator.sort(entities, entityOrder);
}
} catch (ModelLoaderException | EntityFinderException ex) {
LOGGER.error("Unable to process through the Idyl pipeline.", ex);
// Return null on receipt of an error. This is here
// because otherwise an incomplete list of
// entities could potentially be returned when an
// exception is thrown.
entities = null;
successful = false;
}
return new EntityExtractionResponse(entities, extractionTime, successful);
}
/**
* Remove duplicate entities having a lower confidence.
* @param entities A set of {@link Entity} objects.
* @return A set of {@link Entity} objects without duplicate entities.
*/
public static Set<Entity> removeDuplicateEntities(Set<Entity> entities) {
Set<Entity> removedDuplicateEntities = new LinkedHashSet<>();
for(Entity entity : entities) {
Set<Entity> entitiesWithSameText = new HashSet<Entity>();
// Is there another entity in the set that has this entity's text?
for(Entity entity2 : entities) {
if(entity.getText().equalsIgnoreCase(entity2.getText())) {
entitiesWithSameText.add(entity2);
}
}
// Should always be at least one (the same entity).
if(entitiesWithSameText.size() == 1) {
removedDuplicateEntities.addAll(entitiesWithSameText);
} else {
// Find the one with the highest confidence.
double highestConfidence = 0;
Entity entityWithHighestConfidence = null;
for(Entity entity3 : entitiesWithSameText) {
if(entity3.getConfidence() > highestConfidence) {
highestConfidence = entity3.getConfidence();
entityWithHighestConfidence = entity3;
}
}
removedDuplicateEntities.add(entityWithHighestConfidence);
}
}
return removedDuplicateEntities;
}
/**
* Gets the sentence detector used by the pipeline.
* @return The {@link SentenceDetector} used by the pipeline.
*/
public SentenceDetector getSentenceDetector() {
return sentenceDetector;
}
/**
* Gets the tokenizer used by the pipeline.
* @return The {@link Tokenizer} used by the pipeline.
*/
public Tokenizer getTokenizer() {
return tokenizer;
}
/**
* Gets the entity recognizers.
* @return A list of entity {@link EntityRecognizer recognizers}.
*/
public List<EntityRecognizer> getEntityRecognizers() {
return entityRecognizers;
}
/**
* Gets the entity sanitizers.
* @return A list of entity {@link EntitySanizer sanitizers}.
*/
public List<EntitySanitizer> getEntitySanitiziers() {
return entitySanitizers;
}
/**
* Gets the {@link StatsReporter}.
* @return The {@link StatsReporter}.
*/
public StatsReporter getStatsReporter() {
return statsReporter;
}
/**
* Gets the duplicate entity strategy.
* @return The duplicate entity {@link DuplicateEntityStrategy strategy}.
*/
public DuplicateEntityStrategy getDuplicateEntityStrategy() {
return duplicateEntityStrategy;
}
/**
* Gets the language code for the pipeline.
* @return The {@link LanguageCode} for the pipeline.
*/
public LanguageCode getLanguageCode() {
return languageCode;
}
/**
* Gets the entity order for the pipeline.
* @return The {@link EntityOrder} for the pipeline.
*/
public EntityOrder getEntityOrder() {
return entityOrder;
}
/**
* Gets the model zoo client for the pipeline.
* @return The {@link IdylNLPModelZoo} client for the pipeline.
*/
public IdylNLPModelZoo getZoo() {
return zoo;
}
/**
* Gets the entity types for the pipeline.
* @return The entity types for the pipeline.
*/
public Set<String> getEntityTypes() {
return entityTypes;
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-testing/1.1.0/ai/idylnlp
|
java-sources/ai/idylnlp/idylnlp-testing/1.1.0/ai/idylnlp/testing/ObjectTest.java
|
/*******************************************************************************
* Copyright 2018 Mountain Fog, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package ai.idylnlp.testing;
import java.lang.reflect.ParameterizedType;
import org.junit.Test;
import nl.jqno.equalsverifier.EqualsVerifier;
import nl.jqno.equalsverifier.Warning;
/**
 * Provides a test for <code>equals</code> and <code>hashCode</code>
 * to check for contract compliance.
 * @author Mountain Fog, Inc.
 */
public abstract class ObjectTest<T extends Object> {

  /**
   * Verifies the equals/hashCode contract of the class under test.
   */
  @Test
  public void equalsContract() {

    EqualsVerifier.forClass(getTestedClass())
        .suppress(
            Warning.NONFINAL_FIELDS,
            Warning.INHERITED_DIRECTLY_FROM_OBJECT,
            // TODO: Remove this suppression.
            Warning.ALL_FIELDS_SHOULD_BE_USED)
        .verify();
  }

  /**
   * Resolves the concrete class supplied as the subclass's type argument.
   */
  @SuppressWarnings("unchecked")
  private Class<T> getTestedClass() {
    final ParameterizedType superType = (ParameterizedType) getClass().getGenericSuperclass();
    return (Class<T>) superType.getActualTypeArguments()[0];
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-testing/1.1.0/ai/idylnlp
|
java-sources/ai/idylnlp/idylnlp-testing/1.1.0/ai/idylnlp/testing/TestUtils.java
|
/*******************************************************************************
* Copyright 2018 Mountain Fog, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package ai.idylnlp.testing;
import java.io.File;
import java.util.Locale;
/**
 * Static utility methods shared by tests.
 */
public class TestUtils {

  // Utility class; not meant to be instantiated.
  private TestUtils() {
  }

  /**
   * Gets the absolute path to the test resources.
   * @return The absolute path to the <code>src/test/resources</code> directory.
   */
  public static String getTestResourcesAbsolutePath() {
    return new File("src/test/resources/").getAbsolutePath();
  }

  /**
   * Returns <code>true</code> if running on Windows.
   * <p>
   * The comparison uses {@link Locale#ROOT} so the "win" prefix check is not
   * broken by locale-sensitive case mapping (e.g. the Turkish dotless i,
   * under which "Windows" lower-cases to a string that does not start
   * with "win").
   * @return <code>True</code> if running on Windows.
   */
  public static boolean isWindows() {
    return System.getProperty("os.name").toLowerCase(Locale.ROOT).startsWith("win");
  }

  /**
   * Returns a value based on the operating system.
   * @param linuxValue The value to return when on Linux.
   * @param windowsValue The value to return when on Windows.
   * @return The <code>linuxValue</code> when on Linux and the <code>windowsValue</code>
   * when on Windows.
   */
  public static String setOsDependentValue(String linuxValue, String windowsValue) {
    if (isWindows()) {
      return windowsValue;
    } else {
      return linuxValue;
    }
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-testing/1.1.0/ai/idylnlp/testing
|
java-sources/ai/idylnlp/idylnlp-testing/1.1.0/ai/idylnlp/testing/markers/ExternalData.java
|
/*******************************************************************************
* Copyright 2018 Mountain Fog, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package ai.idylnlp.testing.markers;
/**
 * Marker interface for tests that require external data.
 * <p>
 * NOTE(review): presumably used as a JUnit category to include/exclude
 * such tests from a build — confirm against the test configuration.
 */
public interface ExternalData {
}
|
0
|
java-sources/ai/idylnlp/idylnlp-testing/1.1.0/ai/idylnlp/testing
|
java-sources/ai/idylnlp/idylnlp-testing/1.1.0/ai/idylnlp/testing/markers/HighMemoryUsage.java
|
/*******************************************************************************
* Copyright 2018 Mountain Fog, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package ai.idylnlp.testing.markers;
/**
 * Marker interface for tests that use large amounts of memory.
 * <p>
 * NOTE(review): presumably used as a JUnit category to include/exclude
 * such tests from a build — confirm against the test configuration.
 */
public interface HighMemoryUsage {
}
|
0
|
java-sources/ai/idylnlp/idylnlp-training-definition-file/1.1.0/ai/idylnlp/training
|
java-sources/ai/idylnlp/idylnlp-training-definition-file/1.1.0/ai/idylnlp/training/definition/TrainingDefinitionFileReader.java
|
/*******************************************************************************
* Copyright 2018 Mountain Fog, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package ai.idylnlp.training.definition;
import java.io.File;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.LinkedList;
import java.util.List;

import javax.xml.bind.JAXBContext;
import javax.xml.bind.Unmarshaller;

import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import ai.idylnlp.training.definition.model.TrainingDefinitionException;
import ai.idylnlp.training.definition.model.TrainingDefinitionReader;
import ai.idylnlp.training.definition.model.TrainingDefinitionValidationResult;
import ai.idylnlp.training.definition.xml.Trainingdefinition;
/**
 * Implementation of {@link TrainingDefinitionReader} that reads a training definition
 * XML file and exposes the deserialized {@link Trainingdefinition} object.
 *
 * @author Mountain Fog, Inc.
 *
 */
public class TrainingDefinitionFileReader implements TrainingDefinitionReader {

  private static final Logger LOGGER = LogManager.getLogger(TrainingDefinitionFileReader.class);

  private Trainingdefinition trainingDefinition;
  private File file;

  /**
   * Creates a new training definition file reader.
   * @param file The {@link File file} containing the training definition.
   * @throws TrainingDefinitionException Thrown if the training definition file cannot be parsed.
   */
  public TrainingDefinitionFileReader(File file) throws TrainingDefinitionException {

    this.file = file;

    try {

      JAXBContext jaxbContext = JAXBContext.newInstance(Trainingdefinition.class);
      Unmarshaller jaxbUnmarshaller = jaxbContext.createUnmarshaller();

      trainingDefinition = (Trainingdefinition) jaxbUnmarshaller.unmarshal(file);

    } catch (Exception ex) {
      throw new TrainingDefinitionException("Invalid training definition file.", ex);
    }
  }

  /**
   * Validates the deserialized training definition, collecting a message for
   * each problem found.
   * @return A {@link TrainingDefinitionValidationResult} holding the overall
   *     validity and the collected messages.
   */
  @Override
  public TrainingDefinitionValidationResult validate() {

    List<String> messages = new LinkedList<String>();
    boolean valid = true;

    if (StringUtils.isEmpty(trainingDefinition.getModel().getFile())) {
      valid = false;
      messages.add("The training definition is missing the output model's file name.");
    }

    if (StringUtils.isEmpty(trainingDefinition.getModel().getLanguage())) {
      valid = false;
      messages.add("The training definition is missing the model's language.");
    }

    if (StringUtils.isEmpty(trainingDefinition.getModel().getType())) {
      valid = false;
      messages.add("The training definition is missing the model's entity type.");
    }

    if (trainingDefinition.getTrainingdata().getFormat().equalsIgnoreCase("idyl")) {

      if (StringUtils.isEmpty(trainingDefinition.getTrainingdata().getAnnotations())) {
        valid = false;
        messages.add("The training definition is missing an annotations file name.");
      } else {
        // The annotations file must exist on disk. (Renamed from "file" to
        // avoid shadowing the training definition file field, and the
        // message now correctly refers to the annotations file.)
        final File annotationsFile = new File(trainingDefinition.getTrainingdata().getAnnotations());
        if (!annotationsFile.exists()) {
          valid = false;
          messages.add("The annotations file does not exist.");
        }
      }

    } else {
      // Any value other than "idyl" will default to "opennlp".
    }

    return new TrainingDefinitionValidationResult(valid, messages);
  }

  /**
   * Gets the feature generator XML from the training definition file.
   * <p>
   * Extraction is textual: the content between the first
   * {@code <features>} and {@code </features>} tags is returned verbatim.
   * NOTE(review): this assumes a single, well-formed features element —
   * confirm that nested/CDATA content never occurs.
   * @return The feature generator XML, or {@code null} if none is present
   *     or the file cannot be read.
   */
  @Override
  public String getFeatures() {

    try {

      // Read explicitly as UTF-8 instead of the deprecated platform-charset
      // FileUtils.readFileToString(File) overload.
      final String xml = new String(Files.readAllBytes(file.toPath()), StandardCharsets.UTF_8);

      final int start = xml.indexOf("<features>");
      if (start != -1) {
        final int end = xml.indexOf("</features>");
        if (end != -1) {
          return xml.substring(start + "<features>".length(), end);
        }
      }

    } catch (Exception ex) {
      LOGGER.error("Unable to extract feature generators from training definition file. This will cause a default set of feature generators to be used which may not be ideal.", ex);
    }

    return null;
  }

  /**
   * Gets the deserialized training definition.
   * @return The {@link Trainingdefinition}.
   */
  @Override
  public Trainingdefinition getTrainingDefinition() {
    return trainingDefinition;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-training-definition-file/1.1.0/ai/idylnlp/training/definition
|
java-sources/ai/idylnlp/idylnlp-training-definition-file/1.1.0/ai/idylnlp/training/definition/model/TrainingDefinitionException.java
|
/*******************************************************************************
* Copyright 2018 Mountain Fog, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package ai.idylnlp.training.definition.model;
/**
 * Exception that is thrown when a training definition
 * cannot be read.
 *
 * @author Mountain Fog, Inc.
 *
 */
public class TrainingDefinitionException extends Exception {

  private static final long serialVersionUID = 7557243516752367591L;

  /**
   * Creates a new training definition exception without an underlying cause.
   * @param message The message of the exception.
   */
  public TrainingDefinitionException(String message) {
    super(message);
  }

  /**
   * Creates a new training definition exception.
   * @param message The message of the exception.
   * @param ex The underlying {@link Exception}; preserved as this exception's cause.
   */
  public TrainingDefinitionException(String message, Exception ex) {
    super(message, ex);
  }

}
|
0
|
java-sources/ai/idylnlp/idylnlp-training-definition-file/1.1.0/ai/idylnlp/training/definition
|
java-sources/ai/idylnlp/idylnlp-training-definition-file/1.1.0/ai/idylnlp/training/definition/model/TrainingDefinitionReader.java
|
/*******************************************************************************
* Copyright 2018 Mountain Fog, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package ai.idylnlp.training.definition.model;
import ai.idylnlp.training.definition.xml.Trainingdefinition;
/**
 * Interface for training definition readers.
 *
 * @author Mountain Fog, Inc.
 *
 */
public interface TrainingDefinitionReader {

  /**
   * Validates the training definition.
   * @return A {@link TrainingDefinitionValidationResult} describing the outcome.
   */
  TrainingDefinitionValidationResult validate();

  /**
   * Gets the feature generators XML fragment from the training definition file.
   * @return The feature generators XML, or possibly {@code null} when unavailable.
   */
  String getFeatures();

  /**
   * Gets the parsed training definition.
   * @return The {@link Trainingdefinition}.
   */
  Trainingdefinition getTrainingDefinition();

}
|
0
|
java-sources/ai/idylnlp/idylnlp-training-definition-file/1.1.0/ai/idylnlp/training/definition
|
java-sources/ai/idylnlp/idylnlp-training-definition-file/1.1.0/ai/idylnlp/training/definition/model/TrainingDefinitionValidationResult.java
|
/*******************************************************************************
* Copyright 2018 Mountain Fog, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package ai.idylnlp.training.definition.model;
import java.util.ArrayList;
import java.util.List;
/**
 * Result of validating a training definition: a pass/fail flag plus any
 * validation messages that were produced.
 */
public class TrainingDefinitionValidationResult {

  // True when the training definition passed validation.
  private boolean valid;

  // Validation messages; never null after construction.
  private List<String> messages;

  /**
   * Creates a result representing a successful validation with no messages.
   */
  public TrainingDefinitionValidationResult() {
    this.valid = true;
    // Previously left null, which forced every caller to null-check getMessages().
    this.messages = new ArrayList<String>();
  }

  /**
   * Creates a result.
   * @param valid Whether the training definition is valid.
   * @param messages The validation messages; a {@code null} argument is
   *     stored as an empty list so {@link #getMessages()} never returns null.
   */
  public TrainingDefinitionValidationResult(boolean valid, List<String> messages) {
    this.valid = valid;
    this.messages = (messages == null) ? new ArrayList<String>() : messages;
  }

  /**
   * Returns whether the training definition is valid.
   * @return {@code true} when valid.
   */
  public boolean isValid() {
    return valid;
  }

  /**
   * Returns the validation messages.
   * @return The messages; never {@code null}.
   */
  public List<String> getMessages() {
    return messages;
  }

}
|
0
|
java-sources/ai/idylnlp/idylnlp-training-definition-file-xml/1.1.0/ai/idylnlp/training/definition
|
java-sources/ai/idylnlp/idylnlp-training-definition-file-xml/1.1.0/ai/idylnlp/training/definition/xml/Generators.java
|
package ai.idylnlp.training.definition.xml;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.XmlValue;
/**
 * JAXB binding for the {@code generators} complex type used by training
 * definition files. Holds the feature-generator configuration: up to two
 * {@code window} elements, several optional simple generator elements
 * ({@code definition}, {@code prevmap}, {@code bigram}, {@code tokenclass},
 * {@code token}) and an optional {@code sentence} element.
 *
 * NOTE(review): field names are significant — {@code XmlAccessType.FIELD}
 * maps them directly to XML element names, so they must not be renamed.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "generators", propOrder = {
    "window",
    "definition",
    "prevmap",
    "bigram",
    "tokenclass",
    "token",
    "sentence"
})
public class Generators {

    protected List<Generators.Window> window;
    protected String definition;
    protected String prevmap;
    protected String bigram;
    protected String tokenclass;
    protected String token;
    protected Generators.Sentence sentence;

    /**
     * Returns the live, lazily-created list of {@code window} elements.
     * Mutations to the returned list are reflected in this object, which is
     * why no corresponding setter exists.
     *
     * @return The live list of {@link Generators.Window} elements; never {@code null}.
     */
    public List<Generators.Window> getWindow() {
        if (window == null) {
            window = new ArrayList<Generators.Window>();
        }
        return this.window;
    }

    /** Returns the {@code definition} element value, or {@code null} if unset. */
    public String getDefinition() {
        return definition;
    }

    /** Sets the {@code definition} element value. */
    public void setDefinition(String value) {
        this.definition = value;
    }

    /** Returns the {@code prevmap} element value, or {@code null} if unset. */
    public String getPrevmap() {
        return prevmap;
    }

    /** Sets the {@code prevmap} element value. */
    public void setPrevmap(String value) {
        this.prevmap = value;
    }

    /** Returns the {@code bigram} element value, or {@code null} if unset. */
    public String getBigram() {
        return bigram;
    }

    /** Sets the {@code bigram} element value. */
    public void setBigram(String value) {
        this.bigram = value;
    }

    /** Returns the {@code tokenclass} element value, or {@code null} if unset. */
    public String getTokenclass() {
        return tokenclass;
    }

    /** Sets the {@code tokenclass} element value. */
    public void setTokenclass(String value) {
        this.tokenclass = value;
    }

    /** Returns the {@code token} element value, or {@code null} if unset. */
    public String getToken() {
        return token;
    }

    /** Sets the {@code token} element value. */
    public void setToken(String value) {
        this.token = value;
    }

    /** Returns the {@code sentence} element, or {@code null} if unset. */
    public Generators.Sentence getSentence() {
        return sentence;
    }

    /** Sets the {@code sentence} element. */
    public void setSentence(Generators.Sentence value) {
        this.sentence = value;
    }

    /**
     * JAXB binding for the optional {@code sentence} element: a string value
     * with optional {@code begin} and {@code end} attributes.
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "", propOrder = {
        "value"
    })
    public static class Sentence {

        @XmlValue
        protected String value;
        @XmlAttribute(name = "begin")
        protected String begin;
        @XmlAttribute(name = "end")
        protected String end;

        /** Returns the element text, or {@code null} if unset. */
        public String getValue() {
            return value;
        }

        /** Sets the element text. */
        public void setValue(String value) {
            this.value = value;
        }

        /** Returns the {@code begin} attribute, or {@code null} if unset. */
        public String getBegin() {
            return begin;
        }

        /** Sets the {@code begin} attribute. */
        public void setBegin(String value) {
            this.begin = value;
        }

        /** Returns the {@code end} attribute, or {@code null} if unset. */
        public String getEnd() {
            return end;
        }

        /** Sets the {@code end} attribute. */
        public void setEnd(String value) {
            this.end = value;
        }

    }

    /**
     * JAXB binding for a {@code window} element: optional {@code tokenclass}
     * and {@code token} children plus {@code prevLength}/{@code nextLength}
     * byte attributes.
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "", propOrder = {
    })
    public static class Window {

        protected String tokenclass;
        protected String token;
        @XmlAttribute(name = "prevLength")
        protected Byte prevLength;
        @XmlAttribute(name = "nextLength")
        protected Byte nextLength;

        /** Returns the {@code tokenclass} element value, or {@code null} if unset. */
        public String getTokenclass() {
            return tokenclass;
        }

        /** Sets the {@code tokenclass} element value. */
        public void setTokenclass(String value) {
            this.tokenclass = value;
        }

        /** Returns the {@code token} element value, or {@code null} if unset. */
        public String getToken() {
            return token;
        }

        /** Sets the {@code token} element value. */
        public void setToken(String value) {
            this.token = value;
        }

        /** Returns the {@code prevLength} attribute, or {@code null} if unset. */
        public Byte getPrevLength() {
            return prevLength;
        }

        /** Sets the {@code prevLength} attribute. */
        public void setPrevLength(Byte value) {
            this.prevLength = value;
        }

        /** Returns the {@code nextLength} attribute, or {@code null} if unset. */
        public Byte getNextLength() {
            return nextLength;
        }

        /** Sets the {@code nextLength} attribute. */
        public void setNextLength(Byte value) {
            this.nextLength = value;
        }

    }

}
|
0
|
java-sources/ai/idylnlp/idylnlp-training-definition-file-xml/1.1.0/ai/idylnlp/training/definition
|
java-sources/ai/idylnlp/idylnlp-training-definition-file-xml/1.1.0/ai/idylnlp/training/definition/xml/ObjectFactory.java
|
package ai.idylnlp.training.definition.xml;
import javax.xml.bind.annotation.XmlRegistry;
/**
* This object contains factory methods for each
* Java content interface and Java element interface
* generated in the ai.idylnlp.training.definition.xml package.
* <p>An ObjectFactory allows you to programatically
* construct new instances of the Java representation
* for XML content. The Java representation of XML
* content can consist of schema derived interfaces
* and classes representing the binding of schema
* type definitions, element declarations and model
* groups. Factory methods for each of these are
* provided in this class.
*
*/
@XmlRegistry
public class ObjectFactory {
/**
* Create a new ObjectFactory that can be used to create new instances of schema derived classes for package: ai.idylnlp.training.definition.xml
*
*/
public ObjectFactory() {
}
/**
* Create an instance of {@link Trainingdefinition }
*
*/
public Trainingdefinition createTrainingdefinition() {
return new Trainingdefinition();
}
/**
* Create an instance of {@link ai.idylnlp.training.definition.xml.Generators }
*
*/
public ai.idylnlp.training.definition.xml.Generators createGenerators() {
return new ai.idylnlp.training.definition.xml.Generators();
}
/**
* Create an instance of {@link Trainingdefinition.Features }
*
*/
public Trainingdefinition.Features createTrainingdefinitionFeatures() {
return new Trainingdefinition.Features();
}
/**
* Create an instance of {@link Trainingdefinition.Features.Generators }
*
*/
public Trainingdefinition.Features.Generators createTrainingdefinitionFeaturesGenerators() {
return new Trainingdefinition.Features.Generators();
}
/**
* Create an instance of {@link Trainingdefinition.Algorithm }
*
*/
public Trainingdefinition.Algorithm createTrainingdefinitionAlgorithm() {
return new Trainingdefinition.Algorithm();
}
/**
* Create an instance of {@link Trainingdefinition.Trainingdata }
*
*/
public Trainingdefinition.Trainingdata createTrainingdefinitionTrainingdata() {
return new Trainingdefinition.Trainingdata();
}
/**
* Create an instance of {@link Trainingdefinition.Evaluationdata }
*
*/
public Trainingdefinition.Evaluationdata createTrainingdefinitionEvaluationdata() {
return new Trainingdefinition.Evaluationdata();
}
/**
* Create an instance of {@link Trainingdefinition.Model }
*
*/
public Trainingdefinition.Model createTrainingdefinitionModel() {
return new Trainingdefinition.Model();
}
/**
* Create an instance of {@link ai.idylnlp.training.definition.xml.Generators.Window }
*
*/
public ai.idylnlp.training.definition.xml.Generators.Window createGeneratorsWindow() {
return new ai.idylnlp.training.definition.xml.Generators.Window();
}
/**
* Create an instance of {@link ai.idylnlp.training.definition.xml.Generators.Sentence }
*
*/
public ai.idylnlp.training.definition.xml.Generators.Sentence createGeneratorsSentence() {
return new ai.idylnlp.training.definition.xml.Generators.Sentence();
}
/**
* Create an instance of {@link Trainingdefinition.Features.Generators.Cache }
*
*/
public Trainingdefinition.Features.Generators.Cache createTrainingdefinitionFeaturesGeneratorsCache() {
return new Trainingdefinition.Features.Generators.Cache();
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-training-definition-file-xml/1.1.0/ai/idylnlp/training/definition
|
java-sources/ai/idylnlp/idylnlp-training-definition-file-xml/1.1.0/ai/idylnlp/training/definition/xml/Trainingdefinition.java
|
package ai.idylnlp.training.definition.xml;
import java.math.BigDecimal;
import java.math.BigInteger;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <all>
* <element name="algorithm">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <attribute name="name" type="{http://www.w3.org/2001/XMLSchema}string" default="perceptron" />
* <attribute name="cutoff" type="{http://www.w3.org/2001/XMLSchema}nonNegativeInteger" default="0" />
* <attribute name="iterations" type="{http://www.w3.org/2001/XMLSchema}nonNegativeInteger" default="100" />
* <attribute name="threads" type="{http://www.w3.org/2001/XMLSchema}nonNegativeInteger" default="2" />
* <attribute name="l1" type="{http://www.w3.org/2001/XMLSchema}decimal" default="0.1" />
* <attribute name="l2" type="{http://www.w3.org/2001/XMLSchema}decimal" default="0.1" />
* <attribute name="m" type="{http://www.w3.org/2001/XMLSchema}nonNegativeInteger" default="15" />
* <attribute name="max" type="{http://www.w3.org/2001/XMLSchema}nonNegativeInteger" default="30000" />
* <attribute name="windowSize" type="{http://www.w3.org/2001/XMLSchema}nonNegativeInteger" default="5" />
* <attribute name="vectors" type="{http://www.w3.org/2001/XMLSchema}string" />
* </restriction>
* </complexContent>
* </complexType>
* </element>
* <element name="trainingdata">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <attribute name="file" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="format" type="{http://www.w3.org/2001/XMLSchema}string" default="opennlp" />
* <attribute name="annotations" type="{http://www.w3.org/2001/XMLSchema}string" />
* </restriction>
* </complexContent>
* </complexType>
* </element>
* <element name="evaluationdata">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <attribute name="file" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="format" type="{http://www.w3.org/2001/XMLSchema}string" default="opennlp" />
* <attribute name="annotations" type="{http://www.w3.org/2001/XMLSchema}string" />
* </restriction>
* </complexContent>
* </complexType>
* </element>
* <element name="model">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <attribute name="name" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="file" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="encryptionkey" type="{http://www.w3.org/2001/XMLSchema}string" default="none" />
* <attribute name="language" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="type" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
* </restriction>
* </complexContent>
* </complexType>
* </element>
* <element name="features">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="generators">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="cache">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="generators" type="{http://www.mtnfog.com}generators"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </element>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </element>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </element>
* </all>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
})
@XmlRootElement(name = "trainingdefinition")
public class Trainingdefinition {
@XmlElement(required = true)
protected Trainingdefinition.Algorithm algorithm;
@XmlElement(required = true)
protected Trainingdefinition.Trainingdata trainingdata;
@XmlElement(required = true)
protected Trainingdefinition.Evaluationdata evaluationdata;
@XmlElement(required = true)
protected Trainingdefinition.Model model;
@XmlElement(required = true)
protected Trainingdefinition.Features features;
/**
* Gets the value of the algorithm property.
*
* @return
* possible object is
* {@link Trainingdefinition.Algorithm }
*
*/
public Trainingdefinition.Algorithm getAlgorithm() {
return algorithm;
}
/**
* Sets the value of the algorithm property.
*
* @param value
* allowed object is
* {@link Trainingdefinition.Algorithm }
*
*/
public void setAlgorithm(Trainingdefinition.Algorithm value) {
this.algorithm = value;
}
/**
* Gets the value of the trainingdata property.
*
* @return
* possible object is
* {@link Trainingdefinition.Trainingdata }
*
*/
public Trainingdefinition.Trainingdata getTrainingdata() {
return trainingdata;
}
/**
* Sets the value of the trainingdata property.
*
* @param value
* allowed object is
* {@link Trainingdefinition.Trainingdata }
*
*/
public void setTrainingdata(Trainingdefinition.Trainingdata value) {
this.trainingdata = value;
}
/**
* Gets the value of the evaluationdata property.
*
* @return
* possible object is
* {@link Trainingdefinition.Evaluationdata }
*
*/
public Trainingdefinition.Evaluationdata getEvaluationdata() {
return evaluationdata;
}
/**
* Sets the value of the evaluationdata property.
*
* @param value
* allowed object is
* {@link Trainingdefinition.Evaluationdata }
*
*/
public void setEvaluationdata(Trainingdefinition.Evaluationdata value) {
this.evaluationdata = value;
}
/**
* Gets the value of the model property.
*
* @return
* possible object is
* {@link Trainingdefinition.Model }
*
*/
public Trainingdefinition.Model getModel() {
return model;
}
/**
* Sets the value of the model property.
*
* @param value
* allowed object is
* {@link Trainingdefinition.Model }
*
*/
public void setModel(Trainingdefinition.Model value) {
this.model = value;
}
/**
* Gets the value of the features property.
*
* @return
* possible object is
* {@link Trainingdefinition.Features }
*
*/
public Trainingdefinition.Features getFeatures() {
return features;
}
/**
* Sets the value of the features property.
*
* @param value
* allowed object is
* {@link Trainingdefinition.Features }
*
*/
public void setFeatures(Trainingdefinition.Features value) {
this.features = value;
}
/**
 * JAXB binding for the {@code <algorithm>} element of a training definition.
 * Every property is an XML attribute; each getter substitutes the schema
 * default when the attribute was absent from the document.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "")
public static class Algorithm {

    @XmlAttribute(name = "name")
    protected String name;
    @XmlAttribute(name = "cutoff")
    @XmlSchemaType(name = "nonNegativeInteger")
    protected BigInteger cutoff;
    @XmlAttribute(name = "iterations")
    @XmlSchemaType(name = "nonNegativeInteger")
    protected BigInteger iterations;
    @XmlAttribute(name = "threads")
    @XmlSchemaType(name = "nonNegativeInteger")
    protected BigInteger threads;
    @XmlAttribute(name = "l1")
    protected BigDecimal l1;
    @XmlAttribute(name = "l2")
    protected BigDecimal l2;
    @XmlAttribute(name = "m")
    @XmlSchemaType(name = "nonNegativeInteger")
    protected BigInteger m;
    @XmlAttribute(name = "max")
    @XmlSchemaType(name = "nonNegativeInteger")
    protected BigInteger max;
    @XmlAttribute(name = "windowSize")
    @XmlSchemaType(name = "nonNegativeInteger")
    protected BigInteger windowSize;
    @XmlAttribute(name = "vectors")
    protected String vectors;

    /** Returns the algorithm name; defaults to {@code "perceptron"}. */
    public String getName() {
        return (name == null) ? "perceptron" : name;
    }

    /** Sets the algorithm name. */
    public void setName(String value) {
        this.name = value;
    }

    /** Returns the {@code cutoff} attribute; defaults to {@code 0}. */
    public BigInteger getCutoff() {
        return (cutoff == null) ? new BigInteger("0") : cutoff;
    }

    /** Sets the {@code cutoff} attribute. */
    public void setCutoff(BigInteger value) {
        this.cutoff = value;
    }

    /** Returns the {@code iterations} attribute; defaults to {@code 100}. */
    public BigInteger getIterations() {
        return (iterations == null) ? new BigInteger("100") : iterations;
    }

    /** Sets the {@code iterations} attribute. */
    public void setIterations(BigInteger value) {
        this.iterations = value;
    }

    /** Returns the {@code threads} attribute; defaults to {@code 2}. */
    public BigInteger getThreads() {
        return (threads == null) ? new BigInteger("2") : threads;
    }

    /** Sets the {@code threads} attribute. */
    public void setThreads(BigInteger value) {
        this.threads = value;
    }

    /** Returns the {@code l1} attribute; defaults to {@code 0.1}. */
    public BigDecimal getL1() {
        return (l1 == null) ? new BigDecimal("0.1") : l1;
    }

    /** Sets the {@code l1} attribute. */
    public void setL1(BigDecimal value) {
        this.l1 = value;
    }

    /** Returns the {@code l2} attribute; defaults to {@code 0.1}. */
    public BigDecimal getL2() {
        return (l2 == null) ? new BigDecimal("0.1") : l2;
    }

    /** Sets the {@code l2} attribute. */
    public void setL2(BigDecimal value) {
        this.l2 = value;
    }

    /** Returns the {@code m} attribute; defaults to {@code 15}. */
    public BigInteger getM() {
        return (m == null) ? new BigInteger("15") : m;
    }

    /** Sets the {@code m} attribute. */
    public void setM(BigInteger value) {
        this.m = value;
    }

    /** Returns the {@code max} attribute; defaults to {@code 30000}. */
    public BigInteger getMax() {
        return (max == null) ? new BigInteger("30000") : max;
    }

    /** Sets the {@code max} attribute. */
    public void setMax(BigInteger value) {
        this.max = value;
    }

    /** Returns the {@code windowSize} attribute; defaults to {@code 5}. */
    public BigInteger getWindowSize() {
        return (windowSize == null) ? new BigInteger("5") : windowSize;
    }

    /** Sets the {@code windowSize} attribute. */
    public void setWindowSize(BigInteger value) {
        this.windowSize = value;
    }

    /** Returns the {@code vectors} attribute, or {@code null} if unset. */
    public String getVectors() {
        return vectors;
    }

    /** Sets the {@code vectors} attribute. */
    public void setVectors(String value) {
        this.vectors = value;
    }

}
/**
 * JAXB binding for the {@code <evaluationdata>} element: a required
 * {@code file} attribute plus optional {@code format} (default
 * {@code "opennlp"}) and {@code annotations} attributes.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "")
public static class Evaluationdata {

    @XmlAttribute(name = "file", required = true)
    protected String file;
    @XmlAttribute(name = "format")
    protected String format;
    @XmlAttribute(name = "annotations")
    protected String annotations;

    /** Returns the {@code file} attribute. */
    public String getFile() {
        return file;
    }

    /** Sets the {@code file} attribute. */
    public void setFile(String value) {
        this.file = value;
    }

    /** Returns the {@code format} attribute; defaults to {@code "opennlp"}. */
    public String getFormat() {
        return (format == null) ? "opennlp" : format;
    }

    /** Sets the {@code format} attribute. */
    public void setFormat(String value) {
        this.format = value;
    }

    /** Returns the {@code annotations} attribute, or {@code null} if unset. */
    public String getAnnotations() {
        return annotations;
    }

    /** Sets the {@code annotations} attribute. */
    public void setAnnotations(String value) {
        this.annotations = value;
    }

}
/**
 * JAXB type for the {@code <features>} element of a training definition.
 * Wraps a single required {@code <generators>} element whose nested
 * {@code <cache>} holds the actual feature generator definitions
 * (schema namespace {@code http://www.mtnfog.com}).
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "generators"
})
public static class Features {

    @XmlElement(required = true)
    protected Trainingdefinition.Features.Generators generators;

    /**
     * Gets the value of the generators property.
     *
     * @return possible object is {@link Trainingdefinition.Features.Generators }
     */
    public Trainingdefinition.Features.Generators getGenerators() {
        return generators;
    }

    /**
     * Sets the value of the generators property.
     *
     * @param value allowed object is {@link Trainingdefinition.Features.Generators }
     */
    public void setGenerators(Trainingdefinition.Features.Generators value) {
        this.generators = value;
    }

    /**
     * JAXB type for the {@code <generators>} wrapper element; holds the
     * single required {@code <cache>} element.
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "", propOrder = {
        "cache"
    })
    public static class Generators {

        @XmlElement(required = true)
        protected Trainingdefinition.Features.Generators.Cache cache;

        /**
         * Gets the value of the cache property.
         *
         * @return possible object is {@link Trainingdefinition.Features.Generators.Cache }
         */
        public Trainingdefinition.Features.Generators.Cache getCache() {
            return cache;
        }

        /**
         * Sets the value of the cache property.
         *
         * @param value allowed object is {@link Trainingdefinition.Features.Generators.Cache }
         */
        public void setCache(Trainingdefinition.Features.Generators.Cache value) {
            this.cache = value;
        }

        /**
         * JAXB type for the {@code <cache>} element; wraps the shared
         * schema type {@code generators} from the training-definition XML.
         */
        @XmlAccessorType(XmlAccessType.FIELD)
        @XmlType(name = "", propOrder = {
            "generators"
        })
        public static class Cache {

            @XmlElement(required = true)
            protected ai.idylnlp.training.definition.xml.Generators generators;

            /**
             * Gets the value of the generators property.
             *
             * @return possible object is {@link ai.idylnlp.training.definition.xml.Generators }
             */
            public ai.idylnlp.training.definition.xml.Generators getGenerators() {
                return generators;
            }

            /**
             * Sets the value of the generators property.
             *
             * @param value allowed object is {@link ai.idylnlp.training.definition.xml.Generators }
             */
            public void setGenerators(ai.idylnlp.training.definition.xml.Generators value) {
                this.generators = value;
            }
        }
    }
}
/**
 * JAXB type for the {@code <model>} element of a training definition.
 * Describes the model to be produced: its name, output file, optional
 * encryption key (schema default {@code "none"}), language, and entity type.
 * All attributes except {@code encryptionkey} are required by the schema.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "")
public static class Model {

    @XmlAttribute(name = "name", required = true)
    protected String name;
    @XmlAttribute(name = "file", required = true)
    protected String file;
    @XmlAttribute(name = "encryptionkey")
    protected String encryptionkey;
    @XmlAttribute(name = "language", required = true)
    protected String language;
    @XmlAttribute(name = "type", required = true)
    protected String type;

    /**
     * Gets the value of the name property.
     *
     * @return the model name; possible object is {@link String }
     */
    public String getName() {
        return name;
    }

    /**
     * Sets the value of the name property.
     *
     * @param value the new model name; allowed object is {@link String }
     */
    public void setName(String value) {
        this.name = value;
    }

    /**
     * Gets the value of the file property.
     *
     * @return the model output file; possible object is {@link String }
     */
    public String getFile() {
        return file;
    }

    /**
     * Sets the value of the file property.
     *
     * @param value the new model output file; allowed object is {@link String }
     */
    public void setFile(String value) {
        this.file = value;
    }

    /**
     * Gets the value of the encryptionkey property, falling back to the
     * schema default of {@code "none"} when no key was specified.
     *
     * @return the encryption key, or {@code "none"} if unset
     */
    public String getEncryptionkey() {
        if (encryptionkey == null) {
            return "none";
        } else {
            return encryptionkey;
        }
    }

    /**
     * Sets the value of the encryptionkey property.
     *
     * @param value the new encryption key; allowed object is {@link String }
     */
    public void setEncryptionkey(String value) {
        this.encryptionkey = value;
    }

    /**
     * Gets the value of the language property.
     *
     * @return the model language; possible object is {@link String }
     */
    public String getLanguage() {
        return language;
    }

    /**
     * Sets the value of the language property.
     *
     * @param value the new model language; allowed object is {@link String }
     */
    public void setLanguage(String value) {
        this.language = value;
    }

    /**
     * Gets the value of the type property.
     *
     * @return the entity type; possible object is {@link String }
     */
    public String getType() {
        return type;
    }

    /**
     * Sets the value of the type property.
     *
     * @param value the new entity type; allowed object is {@link String }
     */
    public void setType(String value) {
        this.type = value;
    }
}
/**
 * JAXB type for the {@code <trainingdata>} element of a training definition.
 * Identifies the training data file, its format (schema default
 * {@code "opennlp"}), and an optional annotations attribute.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "")
public static class Trainingdata {

    @XmlAttribute(name = "file", required = true)
    protected String file;
    @XmlAttribute(name = "format")
    protected String format;
    @XmlAttribute(name = "annotations")
    protected String annotations;

    /**
     * Gets the value of the file property.
     *
     * @return the training data file; possible object is {@link String }
     */
    public String getFile() {
        return file;
    }

    /**
     * Sets the value of the file property.
     *
     * @param value the new training data file; allowed object is {@link String }
     */
    public void setFile(String value) {
        this.file = value;
    }

    /**
     * Gets the value of the format property, falling back to the schema
     * default of {@code "opennlp"} when no format was specified.
     *
     * @return the training data format, or {@code "opennlp"} if unset
     */
    public String getFormat() {
        if (format == null) {
            return "opennlp";
        } else {
            return format;
        }
    }

    /**
     * Sets the value of the format property.
     *
     * @param value the new training data format; allowed object is {@link String }
     */
    public void setFormat(String value) {
        this.format = value;
    }

    /**
     * Gets the value of the annotations property.
     *
     * @return the annotations value, or {@code null} if unset
     */
    public String getAnnotations() {
        return annotations;
    }

    /**
     * Sets the value of the annotations property.
     *
     * @param value the new annotations value; allowed object is {@link String }
     */
    public void setAnnotations(String value) {
        this.annotations = value;
    }
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-training-definition-file-xml/1.1.0/ai/idylnlp/training/definition
|
java-sources/ai/idylnlp/idylnlp-training-definition-file-xml/1.1.0/ai/idylnlp/training/definition/xml/package-info.java
|
@javax.xml.bind.annotation.XmlSchema(namespace = "http://www.mtnfog.com", elementFormDefault = javax.xml.bind.annotation.XmlNsForm.QUALIFIED)
package ai.idylnlp.training.definition.xml;
|
0
|
java-sources/ai/idylnlp/idylnlp-zoo-client/1.1.0/ai/idylnlp
|
java-sources/ai/idylnlp/idylnlp-zoo-client/1.1.0/ai/idylnlp/zoo/IdylNLPModelZoo.java
|
/*******************************************************************************
* Copyright 2018 Mountain Fog, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package ai.idylnlp.zoo;
import java.io.File;
import java.io.IOException;
import java.util.List;
import org.apache.commons.io.FileUtils;
import com.neovisionaries.i18n.LanguageCode;
import ai.idylnlp.zoo.model.Model;
import okhttp3.ResponseBody;
import retrofit2.Retrofit;
import retrofit2.converter.gson.GsonConverterFactory;
/**
 * Client for the Idyl NLP model zoo that facilitates downloading
 * NLP models from the zoo.
 *
 * @author Mountain Fog, Inc.
 */
public class IdylNLPModelZoo {

  private static final String IDYLNLP_MODEL_ZOO_ENDPOINT = "https://zoo.idylnlp.ai";

  private final ModelZooClient client;
  private final String token;

  /**
   * Creates a new client using the default Idyl NLP endpoint.
   * @param token The client token.
   */
  public IdylNLPModelZoo(String token) {
    // Delegate so the Retrofit wiring exists in exactly one place.
    this(IDYLNLP_MODEL_ZOO_ENDPOINT, token);
  }

  /**
   * Creates a new client.
   * @param endpoint The Idyl NLP model zoo endpoint.
   * @param token The client token.
   */
  public IdylNLPModelZoo(String endpoint, String token) {
    this.token = token;
    final Retrofit retrofit = new Retrofit.Builder()
        .baseUrl(endpoint)
        .addConverterFactory(GsonConverterFactory.create())
        .build();
    client = retrofit.create(ModelZooClient.class);
  }

  /**
   * Downloads a model from the zoo.
   * @param modelId The model's ID.
   * @param destination A {@link File} to hold the downloaded model.
   * @throws IOException Thrown if the model file cannot be downloaded.
   */
  public void downloadModel(String modelId, File destination) throws IOException {
    final ResponseBody responseBody = client.getModelUrl(token, modelId).execute().body();
    if (responseBody == null) {
      throw new IOException("No response body received for model " + modelId + ".");
    }
    // The ResponseBody must be closed to release the underlying HTTP
    // connection; try-with-resources guarantees that even when the copy fails.
    try (ResponseBody body = responseBody) {
      FileUtils.copyInputStreamToFile(body.byteStream(), destination);
    }
  }

  /**
   * Finds all available models for a given language.
   * @param languageCode The language.
   * @return The {@link Model models} available for the language, as returned
   *     by the zoo; may be {@code null} when the response has no body.
   * @throws IOException Thrown if the zoo cannot be reached.
   */
  public List<Model> getModelsByLanguage(LanguageCode languageCode) throws IOException {
    // The zoo API expects the lower-cased alpha-3 code.
    return client.getModelsForLanguage(token, languageCode.getAlpha3().toString().toLowerCase()).execute().body();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-zoo-client/1.1.0/ai/idylnlp
|
java-sources/ai/idylnlp/idylnlp-zoo-client/1.1.0/ai/idylnlp/zoo/ModelZooClient.java
|
/*******************************************************************************
* Copyright 2018 Mountain Fog, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package ai.idylnlp.zoo;
import java.util.List;
import ai.idylnlp.zoo.model.Model;
import okhttp3.ResponseBody;
import retrofit2.Call;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.Path;
import retrofit2.http.Streaming;
/**
 * Retrofit service definition for the Idyl NLP model zoo REST API.
 * Every call authenticates by sending the client token in the
 * {@code X-Token} request header.
 */
public interface ModelZooClient {

  /**
   * Downloads the raw bytes of a model. {@code @Streaming} keeps Retrofit
   * from buffering the whole (potentially large) model body in memory.
   *
   * @param token the client token
   * @param modelId the ID of the model to download
   * @return a call yielding the model's raw response body
   */
  @GET("/model/{id}")
  @Streaming
  Call<ResponseBody> getModelUrl(@Header("X-Token") String token, @Path("id") String modelId);

  /**
   * Lists the models available for a language.
   *
   * @param token the client token
   * @param language the lowercase language code (callers in this project pass
   *     lower-cased alpha-3 codes)
   * @return a call yielding the matching models
   */
  @GET("/models/{language}")
  Call<List<Model>> getModelsForLanguage(@Header("X-Token") String token, @Path("language") String language);
}
|
0
|
java-sources/ai/idylnlp/idylnlp-zoo-client/1.1.0/ai/idylnlp/zoo
|
java-sources/ai/idylnlp/idylnlp-zoo-client/1.1.0/ai/idylnlp/zoo/model/Model.java
|
/*******************************************************************************
* Copyright 2018 Mountain Fog, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package ai.idylnlp.zoo.model;
/**
 * A model entry in the Idyl NLP model zoo. Plain value holder populated
 * from the zoo's JSON responses; all properties are simple strings.
 */
public class Model {

  private String id;
  private String url;
  private String language;
  private String type;
  private String version;

  /** @return the model's unique identifier */
  public String getId() {
    return this.id;
  }

  /** @param id the model's unique identifier */
  public void setId(String id) {
    this.id = id;
  }

  /** @return the URL the model can be downloaded from */
  public String getUrl() {
    return this.url;
  }

  /** @param url the URL the model can be downloaded from */
  public void setUrl(String url) {
    this.url = url;
  }

  /** @return the model's language */
  public String getLanguage() {
    return this.language;
  }

  /** @param language the model's language */
  public void setLanguage(String language) {
    this.language = language;
  }

  /** @return the model's type */
  public String getType() {
    return this.type;
  }

  /** @param type the model's type */
  public void setType(String type) {
    this.type = type;
  }

  /** @return the model's version */
  public String getVersion() {
    return this.version;
  }

  /** @param version the model's version */
  public void setVersion(String version) {
    this.version = version;
  }
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/Capability.java
|
package ai.knowly.langtorch.capability;
import com.google.common.util.concurrent.ListenableFuture;
/**
 * A unit of work that transforms an input of type {@code T} into a result
 * of type {@code R}, either synchronously or asynchronously.
 *
 * @param <T> the input type
 * @param <R> the result type
 */
public interface Capability<T, R> {

  /** Runs the capability synchronously and returns its result. */
  R run(T inputData);

  /** Runs the capability asynchronously, returning a future for the result. */
  ListenableFuture<R> runAsync(T inputData);
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/graph/CapabilityGraph.java
|
package ai.knowly.langtorch.capability.graph;
import com.google.auto.value.AutoValue;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import com.google.common.reflect.TypeToken;
import java.util.*;
import java.util.concurrent.ExecutionException;
/** Class representing a directed acyclic graph (DAG) of capabilities. */
@AutoValue
public abstract class CapabilityGraph {

  /** Creates an empty graph backed by fresh, mutable collections. */
  public static CapabilityGraph create() {
    return new AutoValue_CapabilityGraph(
        new HashMap<>(),
        ArrayListMultimap.create(),
        new HashMap<>(),
        ArrayListMultimap.create(),
        new HashMap<>());
  }

  // Node id -> node implementation.
  abstract HashMap<String, NodeAdapter<?, ?>> nodes();

  // Node id -> every input value accumulated for that node.
  abstract Multimap<String, Object> inputMap();

  // Node id -> the node's computed output.
  abstract HashMap<String, Object> outputMap();

  // Node id -> ids of the nodes feeding into it (incoming edges).
  abstract Multimap<String, String> inDegreeMap();

  // Node id -> declared input type, checked when initial inputs are set.
  abstract HashMap<String, TypeToken<?>> inputTypes();

  /**
   * Add a node to the CapabilityDAG.
   *
   * @param nodeAdapter Node to be added
   * @param inputType Class object representing the input type of the node
   * @param <I> Input type of the node
   * @param <O> Output type of the node
   */
  public <I, O> void addNode(NodeAdapter<I, O> nodeAdapter, Class<I> inputType) {
    nodes().put(nodeAdapter.getId(), nodeAdapter);
    inputTypes().put(nodeAdapter.getId(), TypeToken.of(inputType));
    // Register this node as an incoming edge of each downstream node.
    for (String outDegree : nodeAdapter.getOutDegree()) {
      inDegreeMap().put(outDegree, nodeAdapter.getId());
    }
  }

  /**
   * Process the CapabilityDAG with the given initial inputs.
   *
   * <p>Nodes are executed in topological order; each node's output is fanned
   * out as an input to all of its downstream nodes. Only end nodes (nodes
   * with no outgoing edges) appear in the returned map.
   *
   * @param initialInputMap Map of node IDs to their initial input values
   * @return Map of end node IDs to their final output values
   */
  public Map<String, Object> process(Map<String, Object> initialInputMap)
      throws ExecutionException, InterruptedException {
    for (Map.Entry<String, Object> entry : initialInputMap.entrySet()) {
      setInitialInput(entry.getKey(), entry.getValue());
    }
    List<String> sortedList = topologicalSort();
    for (String id : sortedList) {
      NodeAdapter<?, ?> nodeAdapter = nodes().get(id);
      Collection<Object> input = inputMap().get(id);
      Object output = processNode(nodeAdapter, input);
      addOutput(id, output);
      for (String outDegree : nodeAdapter.getOutDegree()) {
        addInput(outDegree, output);
      }
    }
    Map<String, Object> result = new HashMap<>();
    for (String id : getEndNodeIds()) {
      result.put(id, outputMap().get(id));
    }
    return result;
  }

  // Bridges the wildcard node type to its concrete I/O types. The cast is not
  // re-validated here; it relies on upstream outputs matching the node's
  // declared input type.
  @SuppressWarnings("unchecked")
  private <I, O> O processNode(NodeAdapter<I, O> nodeAdapter, Collection<Object> input)
      throws ExecutionException, InterruptedException {
    Iterable<I> typedInput = (Iterable<I>) input;
    return nodeAdapter.process(typedInput);
  }

  /** Returns the computed output for a node id, or {@code null} if not yet processed. */
  public Object getOutput(String id) {
    return outputMap().get(id);
  }

  // End nodes are those with no outgoing edges.
  private List<String> getEndNodeIds() {
    List<String> endNodeIds = new ArrayList<>();
    for (NodeAdapter<?, ?> nodeAdapter : nodes().values()) {
      if (nodeAdapter.getOutDegree().isEmpty()) {
        endNodeIds.add(nodeAdapter.getId());
      }
    }
    return endNodeIds;
  }

  // Validates the initial input against the node's declared input type.
  // NOTE(review): throws NPE (not IllegalArgumentException) if id was never
  // registered via addNode, because inputTypes().get(id) is null — confirm
  // whether unknown ids should be rejected explicitly.
  private void setInitialInput(String id, Object input) {
    TypeToken<?> expectedType = inputTypes().get(id);
    if (!expectedType.isSupertypeOf(input.getClass())) {
      throw new IllegalArgumentException(
          "Input type for node " + id + " does not match the expected type");
    }
    inputMap().put(id, input);
  }

  // Accumulates an upstream output as one of the node's inputs.
  private void addInput(String id, Object input) {
    inputMap().put(id, input);
  }

  // Records a node's computed output.
  private void addOutput(String id, Object output) {
    outputMap().put(id, output);
  }

  // Kahn's algorithm: repeatedly emit nodes whose remaining in-degree is zero.
  // Throws if a cycle prevents all nodes from being emitted.
  private List<String> topologicalSort() {
    List<String> sorted = new ArrayList<>();
    Queue<String> queue = new LinkedList<>();
    HashMap<String, Integer> inDegrees = new HashMap<>();
    for (Map.Entry<String, NodeAdapter<?, ?>> entry : nodes().entrySet()) {
      int degree = inDegreeMap().get(entry.getKey()).size();
      inDegrees.put(entry.getKey(), degree);
      if (degree == 0) {
        queue.offer(entry.getKey());
      }
    }
    while (!queue.isEmpty()) {
      String current = queue.poll();
      sorted.add(current);
      for (String outDegree : nodes().get(current).getOutDegree()) {
        int degree = inDegrees.get(outDegree) - 1;
        inDegrees.put(outDegree, degree);
        if (degree == 0) {
          queue.offer(outDegree);
        }
      }
    }
    if (sorted.size() != nodes().size()) {
      throw new IllegalStateException("The graph contains a cycle");
    }
    return sorted;
  }
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/graph/NodeAdapter.java
|
package ai.knowly.langtorch.capability.graph;
import java.util.List;
import java.util.concurrent.ExecutionException;
/**
 * Interface representing a node in the CapabilityDAG.
 *
 * @param <I> Input type of the node
 * @param <O> Output type of the node
 */
public interface NodeAdapter<I, O> {

  /** Returns this node's unique identifier within the graph. */
  String getId();

  /** Returns the ids of the downstream nodes this node's output feeds into. */
  List<String> getOutDegree();

  /** Computes this node's output from all inputs accumulated for it. */
  O process(Iterable<I> inputs) throws ExecutionException, InterruptedException;
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/integration
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/integration/minimax/SimpleChatCapability.java
|
package ai.knowly.langtorch.capability.integration.minimax;
import ai.knowly.langtorch.capability.modality.text.MiniMaxChatCompletionLLMCapability;
import ai.knowly.langtorch.capability.modality.text.Parsers;
import ai.knowly.langtorch.preprocessing.parser.ChatMessageToStringParser;
import ai.knowly.langtorch.preprocessing.parser.StringToMultiChatMessageParser;
import ai.knowly.langtorch.processor.minimax.chat.MiniMaxChatProcessor;
import ai.knowly.langtorch.schema.chat.ChatMessage;
import ai.knowly.langtorch.schema.text.MultiChatMessage;
import ai.knowly.langtorch.store.memory.conversation.ConversationMemory;
/**
 * A simple chat capability unit that leverages the MiniMax API to generate a
 * response. Parses a plain {@link String} prompt into a {@link MultiChatMessage}
 * and parses the resulting {@link ChatMessage} back into a {@link String}.
 *
 * @author maxiao
 * @date 2023/06/13
 */
public class SimpleChatCapability extends MiniMaxChatCompletionLLMCapability<String, String> {

  /**
   * @param miniMaxChatProcessor processor that performs the MiniMax chat completion call
   * @param memory conversation memory threaded through each exchange
   */
  public SimpleChatCapability(
      MiniMaxChatProcessor miniMaxChatProcessor, ConversationMemory memory) {
    super(
        miniMaxChatProcessor,
        Parsers.<String, MultiChatMessage, ChatMessage, String>builder()
            .setInputParser(StringToMultiChatMessageParser.create())
            .setOutputParser(ChatMessageToStringParser.create())
            .build(),
        memory);
  }

  /** Overridden to narrow the return type so calls can be chained fluently. */
  @Override
  public SimpleChatCapability withVerboseMode(boolean verboseMode) {
    super.withVerboseMode(verboseMode);
    return this;
  }
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/integration
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/integration/openai/PromptTemplateTextCapability.java
|
package ai.knowly.langtorch.capability.integration.openai;
import ai.knowly.langtorch.capability.modality.text.Parsers;
import ai.knowly.langtorch.capability.modality.text.TextCompletionTextLLMCapability;
import ai.knowly.langtorch.preprocessing.parser.SingleTextToStringParser;
import ai.knowly.langtorch.preprocessing.parser.StringToSingleTextParser;
import ai.knowly.langtorch.processor.openai.text.OpenAITextProcessor;
import ai.knowly.langtorch.prompt.template.PromptTemplate;
import ai.knowly.langtorch.schema.text.SingleText;
import java.util.Map;
import javax.inject.Inject;
/**
 * A capability unit that contains a prompt template and accepts a map of variable-value pairs to
 * the prompt template.
 */
public class PromptTemplateTextCapability extends TextCompletionTextLLMCapability<String, String> {

  // Template whose variables are filled in on each run.
  private final PromptTemplate promptTemplate;

  /**
   * @param openAITextProcessor processor that performs the OpenAI text completion call
   * @param promptTemplate template to format with caller-supplied variables
   */
  @Inject
  public PromptTemplateTextCapability(
      OpenAITextProcessor openAITextProcessor, PromptTemplate promptTemplate) {
    super(
        openAITextProcessor,
        Parsers.<String, SingleText, SingleText, String>builder()
            .setInputParser(StringToSingleTextParser.create())
            .setOutputParser(SingleTextToStringParser.create())
            .build());
    this.promptTemplate = promptTemplate;
  }

  /**
   * Fills the prompt template with the given variable-value pairs and runs
   * the formatted prompt through the text completion pipeline.
   *
   * @param variableMap template variable names mapped to their values
   * @return the model's completion for the formatted prompt
   */
  public String run(Map<String, String> variableMap) {
    return super.run(
        promptTemplate.toBuilder().addAllVariableValuePairs(variableMap).build().format());
  }
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/integration
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/integration/openai/SimpleChatCapability.java
|
package ai.knowly.langtorch.capability.integration.openai;
import ai.knowly.langtorch.capability.modality.text.ChatCompletionLLMCapability;
import ai.knowly.langtorch.capability.modality.text.Parsers;
import ai.knowly.langtorch.preprocessing.parser.ChatMessageToStringParser;
import ai.knowly.langtorch.preprocessing.parser.StringToMultiChatMessageParser;
import ai.knowly.langtorch.processor.openai.chat.OpenAIChatProcessor;
import ai.knowly.langtorch.schema.chat.ChatMessage;
import ai.knowly.langtorch.schema.text.MultiChatMessage;
import ai.knowly.langtorch.store.memory.conversation.ConversationMemory;
import javax.inject.Inject;
/**
 * A simple chat capability unit that leverages the OpenAI API to generate a
 * response. Parses a plain {@link String} prompt into a {@link MultiChatMessage}
 * and parses the resulting {@link ChatMessage} back into a {@link String}.
 */
public class SimpleChatCapability extends ChatCompletionLLMCapability<String, String> {

  /**
   * @param openAIChatProcessor processor that performs the OpenAI chat completion call
   * @param memory conversation memory threaded through each exchange
   */
  @Inject
  public SimpleChatCapability(OpenAIChatProcessor openAIChatProcessor, ConversationMemory memory) {
    super(
        openAIChatProcessor,
        Parsers.<String, MultiChatMessage, ChatMessage, String>builder()
            .setInputParser(StringToMultiChatMessageParser.create())
            .setOutputParser(ChatMessageToStringParser.create())
            .build(),
        memory);
  }

  /** Overridden to narrow the return type so calls can be chained fluently. */
  @Override
  public SimpleChatCapability withVerboseMode(boolean verboseMode) {
    super.withVerboseMode(verboseMode);
    return this;
  }
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/integration
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/integration/openai/SimpleTextCapability.java
|
package ai.knowly.langtorch.capability.integration.openai;
import ai.knowly.langtorch.capability.modality.text.Parsers;
import ai.knowly.langtorch.capability.modality.text.TextCompletionTextLLMCapability;
import ai.knowly.langtorch.preprocessing.parser.SingleTextToStringParser;
import ai.knowly.langtorch.preprocessing.parser.StringToSingleTextParser;
import ai.knowly.langtorch.processor.openai.text.OpenAITextProcessor;
import ai.knowly.langtorch.schema.text.SingleText;
import javax.inject.Inject;
/**
 * A simple text capability unit that leverages the OpenAI API to generate a
 * response. Wraps a plain {@link String} prompt into a {@link SingleText}
 * and unwraps the completion back into a {@link String}.
 */
public class SimpleTextCapability extends TextCompletionTextLLMCapability<String, String> {

  /** @param openAITextProcessor processor that performs the OpenAI text completion call */
  @Inject
  public SimpleTextCapability(OpenAITextProcessor openAITextProcessor) {
    super(
        openAITextProcessor,
        Parsers.<String, SingleText, SingleText, String>builder()
            .setInputParser(StringToSingleTextParser.create())
            .setOutputParser(SingleTextToStringParser.create())
            .build());
  }
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/local
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/local/v1/CapabilityConfig.java
|
package ai.knowly.langtorch.capability.local.v1;
import com.google.gson.annotations.SerializedName;
import lombok.Data;
/**
 * A class for the capability config, deserialized by Gson from a capability
 * folder's {@code config.json} (see LocalCapabilityUnitReader).
 */
@Data
public class CapabilityConfig {

  // Schema version of the config file format.
  @SerializedName("schema")
  private int schema;

  // Human-readable description of the capability.
  @SerializedName("description")
  private String description;

  // Capability type identifier.
  @SerializedName("type")
  private String type;

  // Completion (sampling) settings for the underlying model.
  @SerializedName("completion")
  private Completion completion;

  // Declarations of the input parameters the capability accepts.
  @SerializedName("input")
  private Input input;
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/local
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/local/v1/Completion.java
|
package ai.knowly.langtorch.capability.local.v1;
import com.google.gson.annotations.SerializedName;
import java.util.List;
import lombok.Data;
/**
 * A class for the embedded completion config: the model sampling settings
 * carried inside a capability's {@code config.json}.
 */
@Data
public class Completion {

  // Maximum number of tokens the model may generate.
  @SerializedName("max_tokens")
  private int maxTokens;

  // Sampling temperature.
  @SerializedName("temperature")
  private double temperature;

  // Nucleus (top-p) sampling parameter.
  @SerializedName("top_p")
  private double topP;

  // Presence penalty applied during generation.
  @SerializedName("presence_penalty")
  private double presencePenalty;

  // Frequency penalty applied during generation.
  @SerializedName("frequency_penalty")
  private double frequencyPenalty;

  // Sequences at which generation stops.
  @SerializedName("stop_sequences")
  private List<String> stopSequences;
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/local
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/local/v1/Input.java
|
package ai.knowly.langtorch.capability.local.v1;
import com.google.gson.annotations.SerializedName;
import java.util.List;
import lombok.Data;
/**
 * A class for the input config: declares the parameters a capability
 * accepts. (The original comment said "completion config" — copy-paste.)
 */
@Data
public class Input {

  // The declared input parameters.
  @SerializedName("parameters")
  private List<Parameter> parameters;
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/local
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/local/v1/LocalCapabilityReadException.java
|
package ai.knowly.langtorch.capability.local.v1;
import java.io.IOException;
public class LocalCapabilityReadException extends RuntimeException {
public LocalCapabilityReadException(IOException e) {
super(e);
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/local
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/local/v1/LocalCapabilityUnitReader.java
|
package ai.knowly.langtorch.capability.local.v1;
import ai.knowly.langtorch.prompt.template.PromptTemplate;
import com.google.gson.Gson;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import javax.inject.Inject;
import org.apache.commons.io.IOUtils;
/**
 * Reads a local capability unit from a folder on disk: the prompt from
 * {@code prompt.txt} and the configuration from {@code config.json}.
 */
public class LocalCapabilityUnitReader {

  private static final String PROMPT_FILE_NAME = "prompt.txt";
  private static final String CONFIG_FILE_NAME = "config.json";

  private final String prompt;
  private final CapabilityConfig config;

  /**
   * Loads both files eagerly from the given folder.
   *
   * @param capabilityPath folder containing {@code prompt.txt} and {@code config.json}
   * @param gson used to deserialize {@code config.json}
   * @throws LocalCapabilityReadException if either file cannot be read
   */
  @Inject
  public LocalCapabilityUnitReader(String capabilityPath, Gson gson) {
    this.prompt = readFile(capabilityPath, TARGET.PROMPT);
    this.config = gson.fromJson(readFile(capabilityPath, TARGET.CONFIG), CapabilityConfig.class);
  }

  /** Returns the raw prompt text. */
  public String getPrompt() {
    return prompt;
  }

  /** Returns the parsed capability configuration. */
  public CapabilityConfig getConfig() {
    return config;
  }

  /** Returns the prompt wrapped in a {@link PromptTemplate}. */
  public PromptTemplate getPromptTemplate() {
    return PromptTemplate.builder().setTemplate(prompt).build();
  }

  // Reads one of the two capability files into a string. Uses the standard
  // library instead of commons-io; decoding with the platform default charset
  // matches the previous IOUtils.toString(stream, Charset.defaultCharset()).
  private String readFile(String folderPath, TARGET target) {
    String fileName = target == TARGET.CONFIG ? CONFIG_FILE_NAME : PROMPT_FILE_NAME;
    Path path = Paths.get(folderPath, fileName);
    try {
      return new String(Files.readAllBytes(path), Charset.defaultCharset());
    } catch (IOException e) {
      throw new LocalCapabilityReadException(e);
    }
  }

  // Which of the two capability files to read.
  private enum TARGET {
    CONFIG,
    PROMPT
  }
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/local
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/local/v1/Parameter.java
|
package ai.knowly.langtorch.capability.local.v1;
import com.google.gson.annotations.SerializedName;
import lombok.Data;
/** A class for a single declared input parameter of a capability. */
@Data
public class Parameter {

  // Parameter name.
  @SerializedName("name")
  private String name;

  // Human-readable description of the parameter.
  @SerializedName("description")
  private String description;

  // Value used when the caller supplies none.
  @SerializedName("defaultValue")
  private String defaultValue;
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/modality
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/modality/text/ChatCompletionLLMCapability.java
|
package ai.knowly.langtorch.capability.modality.text;
import static com.google.common.util.concurrent.Futures.immediateFuture;
import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
import ai.knowly.langtorch.processor.Processor;
import ai.knowly.langtorch.schema.chat.ChatMessage;
import ai.knowly.langtorch.schema.text.MultiChatMessage;
import ai.knowly.langtorch.store.memory.Memory;
import ai.knowly.langtorch.store.memory.conversation.ConversationMemoryContext;
import com.google.common.flogger.FluentLogger;
import com.google.common.util.concurrent.FluentFuture;
import com.google.common.util.concurrent.ListenableFuture;
import javax.inject.Inject;
/** Capability for a chat completion language model. */
public class ChatCompletionLLMCapability<I, O>
    implements TextLLMCapabilityWithMemory<
        I, MultiChatMessage, ChatMessage, O, ChatMessage, ConversationMemoryContext> {

  private static final FluentLogger logger = FluentLogger.forEnclosingClass();

  // Executes the actual chat completion request.
  private final Processor<MultiChatMessage, ChatMessage> processor;

  // Optional input/output parsers between I/O types and the chat types.
  private final Parsers<I, MultiChatMessage, ChatMessage, O> parsers;

  // Conversation memory; updated with both prompt and response on every run.
  private final Memory<ChatMessage, ConversationMemoryContext> memory;

  // When true, logs the memory state and the memory-augmented messages.
  private boolean verbose;

  @Inject
  public ChatCompletionLLMCapability(
      Processor<MultiChatMessage, ChatMessage> processor,
      Parsers<I, MultiChatMessage, ChatMessage, O> parsers,
      Memory<ChatMessage, ConversationMemoryContext> memory) {
    this.processor = processor;
    this.parsers = parsers;
    this.memory = memory;
    this.verbose = false;
  }

  /** Enables/disables verbose logging; returns {@code this} for chaining. */
  protected ChatCompletionLLMCapability<I, O> withVerboseMode(boolean verbose) {
    this.verbose = verbose;
    return this;
  }

  /**
   * Converts the raw input into a {@link MultiChatMessage}: passed through
   * unchanged when it already is one, otherwise via the configured input parser.
   *
   * @throws IllegalArgumentException if no input parser is present
   */
  @Override
  public MultiChatMessage preProcess(I inputData) {
    if (inputData instanceof MultiChatMessage) {
      return (MultiChatMessage) inputData;
    }
    return parsers
        .getInputParser()
        .map(parser -> parser.parse(inputData))
        .orElseThrow(
            () ->
                new IllegalArgumentException(
                    "Input data is not a MultiChatMessage and no input parser is present."));
  }

  /**
   * Converts the model's {@link ChatMessage} response into the output type
   * via the configured output parser.
   *
   * @throws IllegalArgumentException if no output parser is present
   */
  @Override
  public O postProcess(ChatMessage outputData) {
    return parsers
        .getOutputParser()
        .map(parser -> parser.parse(outputData))
        .orElseThrow(
            () ->
                new IllegalArgumentException(
                    "Output data type is not ChatMessage and no output parser is present."));
  }

  @Override
  public Memory<ChatMessage, ConversationMemoryContext> getMemory() {
    return memory;
  }

  /** Runs the full pipeline: parse input, generate with memory, parse output. */
  @Override
  public O run(I inputData) {
    return postProcess(generateMemorySideEffectResponse(preProcess(inputData)));
  }

  // Generates a response and, as a side effect, records both the prompt
  // messages and the response in memory.
  private ChatMessage generateMemorySideEffectResponse(MultiChatMessage multiChatMessage) {
    if (verbose) {
      logger.atInfo().log("Memory before processing: %s", memory);
    }
    ChatMessage response = processor.run(getMessageWithMemorySideEffect(multiChatMessage));
    // Adding prompt and response.
    multiChatMessage.getMessages().forEach(memory::add);
    memory.add(response);
    return response;
  }

  // Prefixes each message with the current memory context so the model sees
  // prior conversation state; returns the message unchanged when the context
  // is empty. NOTE(review): assumes memory.getMemoryContext().get() never
  // returns null for an empty conversation — confirm against Memory's contract.
  private MultiChatMessage getMessageWithMemorySideEffect(MultiChatMessage message) {
    // Memory context being empty means that this is the first message in the conversation
    String memoryContext = memory.getMemoryContext().get();
    if (memoryContext.isEmpty()) {
      return message;
    }
    MultiChatMessage updatedMessage =
        message.getMessages().stream()
            .map(
                chatMessage ->
                    new ChatMessage(
                        String.format(
                            "%s%nBelow is my query:%n%s", memoryContext, chatMessage.toString()),
                        chatMessage.getRole(),
                        null,
                        null))
            .collect(MultiChatMessage.toMultiChatMessage());
    if (verbose) {
      logger.atInfo().log("Updated Message with Memory Side Effect: %s", updatedMessage);
    }
    return updatedMessage;
  }

  /** Asynchronous variant of {@link #run}; executes on the calling thread via directExecutor. */
  @Override
  public ListenableFuture<O> runAsync(I inputData) {
    return FluentFuture.from(immediateFuture(inputData)).transform(this::run, directExecutor());
  }
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/modality
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/modality/text/MiniMaxChatCompletionLLMCapability.java
|
package ai.knowly.langtorch.capability.modality.text;
import static com.google.common.util.concurrent.Futures.immediateFuture;
import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
import ai.knowly.langtorch.processor.Processor;
import ai.knowly.langtorch.schema.chat.ChatMessage;
import ai.knowly.langtorch.schema.text.MultiChatMessage;
import ai.knowly.langtorch.store.memory.Memory;
import ai.knowly.langtorch.store.memory.conversation.ConversationMemoryContext;
import com.google.common.flogger.FluentLogger;
import com.google.common.util.concurrent.FluentFuture;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.inject.Inject;
/**
 * Capability for the MiniMax chat completion language model.
 *
 * @author maxiao
 * @date 2023/06/11
 */
public class MiniMaxChatCompletionLLMCapability<I, O>
    implements TextLLMCapabilityWithMemory<
        I, MultiChatMessage, ChatMessage, O, ChatMessage, ConversationMemoryContext> {
  private static final FluentLogger logger = FluentLogger.forEnclosingClass();

  // Executes the chat request against the underlying model.
  private final Processor<MultiChatMessage, ChatMessage> processor;
  // Optional converters between the user-facing types (I, O) and the chat schema types.
  // Made final (was mutable) for consistency with ChatCompletionLLMCapability: both are only
  // assigned in the constructor.
  private final Parsers<I, MultiChatMessage, ChatMessage, O> parsers;
  // Conversation memory, updated as a side effect of every run.
  private final Memory<ChatMessage, ConversationMemoryContext> memory;
  // When true, memory state and augmented prompts are logged.
  private boolean verbose;

  @Inject
  public MiniMaxChatCompletionLLMCapability(
      Processor<MultiChatMessage, ChatMessage> processor,
      Parsers<I, MultiChatMessage, ChatMessage, O> parsers,
      Memory<ChatMessage, ConversationMemoryContext> memory) {
    this.processor = processor;
    this.parsers = parsers;
    this.memory = memory;
    this.verbose = false;
  }

  /** Enables or disables verbose logging; returns this instance for chaining. */
  protected MiniMaxChatCompletionLLMCapability<I, O> withVerboseMode(boolean verbose) {
    this.verbose = verbose;
    return this;
  }

  /** Runs the full pipeline: pre-process, chat completion with memory update, post-process. */
  @Override
  public O run(I inputData) {
    return postProcess(generateMemorySideEffectResponse(preProcess(inputData)));
  }

  // Produces the model reply for the (memory-augmented) prompt and records both the prompt
  // messages and a copy of the reply into memory.
  private ChatMessage generateMemorySideEffectResponse(MultiChatMessage multiChatMessage) {
    if (verbose) {
      logger.atInfo().log("Memory before processing: %s", memory);
    }
    ChatMessage response = processor.run(getMessageWithMemorySideEffect(multiChatMessage));
    // Adding prompt and response.
    multiChatMessage.getMessages().forEach(memory::add);
    // Re-wrap the response, dropping any provider-specific extra fields before storing it.
    ChatMessage message = new ChatMessage(response.getContent(), response.getRole(), null, null);
    memory.add(message);
    return message;
  }

  // Prefixes every message with the current memory context; returns the message unchanged when
  // the context is empty.
  private MultiChatMessage getMessageWithMemorySideEffect(MultiChatMessage message) {
    // Memory context being empty means that this is the first message in the conversation
    String memoryContext = memory.getMemoryContext().get();
    if (memoryContext.isEmpty()) {
      return message;
    }
    MultiChatMessage updatedMessage =
        message.getMessages().stream()
            .map(
                chatMessage ->
                    new ChatMessage(
                        String.format(
                            "%s%nBelow is my query:%n%s", memoryContext, chatMessage.toString()),
                        chatMessage.getRole(),
                        null,
                        null))
            .collect(MultiChatMessage.toMultiChatMessage());
    if (verbose) {
      logger.atInfo().log("Updated Message with Memory Side Effect: %s", updatedMessage);
    }
    return updatedMessage;
  }

  /**
   * Converts the raw input into a {@link MultiChatMessage}, passing it through unchanged when it
   * already is one, otherwise delegating to the configured input parser.
   *
   * @throws IllegalArgumentException when no conversion is possible
   */
  @Override
  public MultiChatMessage preProcess(I inputData) {
    if (inputData instanceof MultiChatMessage) {
      return (MultiChatMessage) inputData;
    }
    return parsers
        .getInputParser()
        .map(parser -> parser.parse(inputData))
        .orElseThrow(
            () ->
                new IllegalArgumentException(
                    "Input data is not a MultiChatMessage and no input parser is present."));
  }

  @Override
  public Memory<ChatMessage, ConversationMemoryContext> getMemory() {
    return memory;
  }

  /**
   * Converts the model reply into the output type via the configured output parser.
   *
   * @throws IllegalArgumentException when no output parser is present
   */
  @Override
  public O postProcess(ChatMessage outputData) {
    return parsers
        .getOutputParser()
        .map(parser -> parser.parse(outputData))
        .orElseThrow(
            () ->
                new IllegalArgumentException(
                    "Output data type is not ChatMessage and no output parser is present."));
  }

  /** Asynchronous variant of {@link #run}; executes on a direct executor. */
  @Override
  public ListenableFuture<O> runAsync(I inputData) {
    return FluentFuture.from(immediateFuture(inputData)).transform(this::run, directExecutor());
  }
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/modality
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/modality/text/Parsers.java
|
package ai.knowly.langtorch.capability.modality.text;
import ai.knowly.langtorch.preprocessing.parser.Parser;
import java.util.Optional;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
/** Holder for the optional input/output parsers used by text LLM capabilities. */
@Data
@Builder(toBuilder = true, setterPrefix = "set")
@AllArgsConstructor(access = lombok.AccessLevel.PRIVATE)
public class Parsers<I, R, S, O> {
  // Converts user input of type I into the request type R; may be null when not configured.
  Parser<I, R> inputParser;
  // Converts the response type S into user output of type O; may be null when not configured.
  Parser<S, O> outputParser;

  /** Returns the input parser, or empty when none was configured. */
  public Optional<Parser<I, R>> getInputParser() {
    return inputParser == null ? Optional.empty() : Optional.of(inputParser);
  }

  /** Returns the output parser, or empty when none was configured. */
  public Optional<Parser<S, O>> getOutputParser() {
    return outputParser == null ? Optional.empty() : Optional.of(outputParser);
  }
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/modality
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/modality/text/TextCompletionTextLLMCapability.java
|
package ai.knowly.langtorch.capability.modality.text;
import static com.google.common.util.concurrent.Futures.immediateFuture;
import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
import ai.knowly.langtorch.processor.Processor;
import ai.knowly.langtorch.schema.text.SingleText;
import com.google.common.util.concurrent.FluentFuture;
import com.google.common.util.concurrent.ListenableFuture;
import javax.inject.Inject;
public class TextCompletionTextLLMCapability<I, O>
implements TextLLMCapability<I, SingleText, SingleText, O> {
private final Processor<SingleText, SingleText> processor;
private final Parsers<I, SingleText, SingleText, O> parsers;
@Inject
public TextCompletionTextLLMCapability(
Processor<SingleText, SingleText> processor, Parsers<I, SingleText, SingleText, O> parsers) {
this.processor = processor;
this.parsers = parsers;
}
@Override
public SingleText preProcess(I inputData) {
if (inputData instanceof SingleText) {
return (SingleText) inputData;
}
return parsers
.getInputParser()
.map(parser -> parser.parse(inputData))
.orElseThrow(
() ->
new IllegalArgumentException(
"Input data is not a SingleText and no input parser is present."));
}
@Override
public O postProcess(SingleText outputData) {
return parsers
.getOutputParser()
.map(parser -> parser.parse(outputData))
.orElseThrow(
() ->
new IllegalArgumentException(
"Output data is not a SingleText and no output parser is present."));
}
@Override
public O run(I inputData) {
return postProcess(processor.run(preProcess(inputData)));
}
@Override
public ListenableFuture<O> runAsync(I inputData) {
return FluentFuture.from(immediateFuture(inputData)).transform(this::run, directExecutor());
}
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/modality
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/modality/text/TextLLMCapability.java
|
package ai.knowly.langtorch.capability.modality.text;
import ai.knowly.langtorch.capability.Capability;
import ai.knowly.langtorch.schema.io.Input;
import ai.knowly.langtorch.schema.io.Output;
/**
 * Interface for a text language model capability.
 *
 * <p>Type parameters: T is the raw user input, I the model input, O the model output, and R the
 * user-facing result.
 */
public interface TextLLMCapability<T, I extends Input, O extends Output, R>
    extends Capability<T, R> {

  /** Converts raw user input of type T into the model input type I. */
  I preProcess(T inputData);

  /** Converts the model output of type O into the user-facing result type R. */
  R postProcess(O outputData);
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/modality
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/capability/modality/text/TextLLMCapabilityWithMemory.java
|
package ai.knowly.langtorch.capability.modality.text;
import ai.knowly.langtorch.capability.Capability;
import ai.knowly.langtorch.schema.io.Input;
import ai.knowly.langtorch.schema.io.Output;
import ai.knowly.langtorch.store.memory.Memory;
import ai.knowly.langtorch.store.memory.MemoryContext;
import ai.knowly.langtorch.store.memory.MemoryValue;
/**
 * Interface for a text language model capability with memory.
 *
 * <p>Type parameters: T is the raw user input, I the model input, O the model output, R the
 * user-facing result, V the memory value type, and S the memory context type.
 */
public interface TextLLMCapabilityWithMemory<
        T, I extends Input, O extends Output, R, V extends MemoryValue, S extends MemoryContext>
    extends Capability<T, R> {

  /** Converts raw user input of type T into the model input type I. */
  I preProcess(T inputData);

  /** Returns the memory backing this capability. */
  Memory<V, S> getMemory();

  /** Converts the model output of type O into the user-facing result type R. */
  R postProcess(O outputData);
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector/Connector.java
|
package ai.knowly.langtorch.connector;
import java.util.Optional;
/** Connector for loading data from a source. */
public interface Connector<T> {
  /**
   * Load data from a source.
   *
   * @return The loaded data, or empty when the implementation could not produce a result.
   */
  Optional<T> read();
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector/ConnectorOption.java
|
package ai.knowly.langtorch.connector;
/** Marker interface for connector read options (file paths, formats, credentials, ...). */
public interface ConnectorOption {}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector/markdown/MarkdownConnector.java
|
package ai.knowly.langtorch.connector.markdown;
import ai.knowly.langtorch.connector.Connector;
import com.google.common.flogger.FluentLogger;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Optional;
import javax.inject.Inject;
import lombok.NonNull;
/** Implementation of DocumentConnector for Md files. */
public class MarkdownConnector implements Connector<String> {
  private static final FluentLogger logger = FluentLogger.forEnclosingClass();

  // Carries the path of the Markdown file to load.
  private final MarkdownConnectorOption readOption;

  @Inject
  public MarkdownConnector(@NonNull MarkdownConnectorOption readOption) {
    this.readOption = readOption;
  }

  /**
   * Reads the configured Markdown file in full, decoded as UTF-8.
   *
   * @throws MarkdownReadException when the file cannot be read
   */
  @Override
  public Optional<String> read() {
    try {
      byte[] raw = Files.readAllBytes(Paths.get(readOption.getFilePath()));
      return Optional.of(new String(raw, StandardCharsets.UTF_8));
    } catch (IOException e) {
      logger.atSevere().withCause(e).log("Error reading Markdown file.");
      throw new MarkdownReadException(e);
    }
  }
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector/markdown/MarkdownConnectorOption.java
|
package ai.knowly.langtorch.connector.markdown;
import ai.knowly.langtorch.connector.ConnectorOption;
import lombok.Builder;
import lombok.Data;
/** Implementation of ReadOption for Markdown files. */
@Data
@Builder(toBuilder = true, setterPrefix = "set")
public class MarkdownConnectorOption implements ConnectorOption {
  // Path of the Markdown file to read.
  private String filePath;
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector/markdown/MarkdownReadException.java
|
package ai.knowly.langtorch.connector.markdown;
/** Exception thrown when a Markdown file cannot be read. */
public class MarkdownReadException extends RuntimeException {
  /** Wraps the underlying cause of the failed read. */
  public MarkdownReadException(Exception cause) {
    super(cause);
  }
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector/pdf/PDFConnector.java
|
package ai.knowly.langtorch.connector.pdf;
import ai.knowly.langtorch.connector.Connector;
import com.google.common.flogger.FluentLogger;
import java.io.File;
import java.io.IOException;
import java.util.Optional;
import javax.inject.Inject;
import lombok.Cleanup;
import lombok.NonNull;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.text.PDFTextStripper;
/** Implementation of DocumentConnector for PDF files. */
public class PDFConnector implements Connector<String> {
  private static final FluentLogger logger = FluentLogger.forEnclosingClass();

  // Carries either the raw PDF bytes or the file path to load.
  private final PDFConnectorOption readOption;

  @Inject
  public PDFConnector(@NonNull PDFConnectorOption readOption) {
    this.readOption = readOption;
  }

  /**
   * Extracts the text content of the configured PDF. In-memory bytes take precedence over a file
   * path when both are present. Returns empty when the PDF cannot be read or when no source was
   * configured.
   */
  @Override
  public Optional<String> read() {
    try {
      // try-with-resources guarantees the document is closed, as @Cleanup did before.
      try (PDDocument document = openDocument()) {
        return Optional.of(new PDFTextStripper().getText(document));
      }
    } catch (IOException e) {
      logger.atSevere().withCause(e).log("Error reading PDF file.");
    } catch (PDFConnectorOptionNotFoundException e) {
      logger.atSevere().withCause(e).log("No read option provided");
    }
    return Optional.empty();
  }

  // Loads the document from raw bytes when present, otherwise from the file path.
  private PDDocument openDocument() throws IOException {
    Optional<byte[]> bytes = readOption.getFileBytes();
    if (bytes.isPresent()) {
      return PDDocument.load(bytes.get());
    }
    Optional<String> filePath = readOption.getFilePath();
    if (filePath.isPresent()) {
      return PDDocument.load(new File(filePath.get()));
    }
    throw new PDFConnectorOptionNotFoundException("No suitable read option provided");
  }
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector/pdf/PDFConnectorOption.java
|
package ai.knowly.langtorch.connector.pdf;
import ai.knowly.langtorch.connector.ConnectorOption;
import lombok.Builder;
import lombok.Data;
import java.util.Optional;
/** Implementation of ReadOption for PDF files. */
@Data
@Builder(toBuilder = true, setterPrefix = "set")
public class PDFConnectorOption implements ConnectorOption {
  // Path of the PDF file to read; may be null when fileBytes is supplied instead.
  private String filePath;
  // Raw PDF content; checked before filePath by PDFConnector when both are set.
  private byte[] fileBytes;

  /** Returns the configured file path, or empty when none was set. */
  public Optional<String> getFilePath() {
    return Optional.ofNullable(filePath);
  }

  /** Returns the configured raw bytes, or empty when none were set. */
  public Optional<byte[]> getFileBytes() {
    return Optional.ofNullable(fileBytes);
  }
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector/pdf/PDFConnectorOptionNotFoundException.java
|
package ai.knowly.langtorch.connector.pdf;
/** Exception thrown when a PDFConnectorOption specifies neither file bytes nor a file path. */
public class PDFConnectorOptionNotFoundException extends RuntimeException {
  /** Creates the exception with a human-readable description of the missing option. */
  public PDFConnectorOptionNotFoundException(String message) {
    super(message);
  }
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector/spreadsheet/SpreadSheetConnector.java
|
package ai.knowly.langtorch.connector.spreadsheet;
import ai.knowly.langtorch.connector.Connector;
import ai.knowly.langtorch.connector.spreadsheet.SpreadSheetConnectorOption.SpreadSheetFormat;
import com.google.common.flogger.FluentLogger;
import java.io.FileReader;
import java.io.IOException;
import java.util.Optional;
import javax.inject.Inject;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
/** Implementation of DocumentConnector for CSV files. */
public class SpreadSheetConnector implements Connector<String> {
  private static final FluentLogger logger = FluentLogger.forEnclosingClass();

  // Carries the file path, the spreadsheet format, and the per-line separator.
  private final SpreadSheetConnectorOption readOption;

  @Inject
  public SpreadSheetConnector(SpreadSheetConnectorOption readOption) {
    this.readOption = readOption;
  }

  // Maps the configured format onto a commons-csv format; anything other than EXCEL
  // (including an absent format or CSV) maps to CSVFormat.DEFAULT, as before.
  private CSVFormat convertCSVFormat(Optional<SpreadSheetFormat> spreadSheetFormat) {
    if (spreadSheetFormat.isPresent() && SpreadSheetFormat.EXCEL == spreadSheetFormat.get()) {
      return CSVFormat.EXCEL;
    }
    return CSVFormat.DEFAULT;
  }

  /**
   * Reads the configured spreadsheet and concatenates each record's string form, each followed by
   * the configured per-line separator (newline by default).
   *
   * @throws SpreadsheetReadException when the file cannot be read
   */
  @Override
  public Optional<String> read() {
    StringBuilder sb = new StringBuilder();
    // Loop-invariant; resolved once instead of per record.
    String separator = readOption.getSeparatorForEachLine().orElse("\n");
    // FIX: the CSVParser is Closeable but was never closed; try-with-resources now closes
    // both the parser and the underlying reader.
    try (FileReader fileReader = new FileReader(readOption.getFilePath());
        CSVParser csvParser =
            new CSVParser(fileReader, convertCSVFormat(readOption.getSpreadSheetFormat()))) {
      for (CSVRecord csvRecord : csvParser) {
        sb.append(csvRecord.toString()).append(separator);
      }
    } catch (IOException e) {
      logger.atSevere().withCause(e).log("Error reading CSV file.");
      throw new SpreadsheetReadException(e);
    }
    return Optional.of(sb.toString());
  }
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector/spreadsheet/SpreadSheetConnectorOption.java
|
package ai.knowly.langtorch.connector.spreadsheet;
import ai.knowly.langtorch.connector.ConnectorOption;
import java.util.Optional;
import lombok.Builder;
import lombok.Data;
import lombok.NonNull;
/** Implementation of ReadOption for CSV files. */
@Data
@Builder(toBuilder = true, setterPrefix = "set")
public class SpreadSheetConnectorOption implements ConnectorOption {
  // Path of the spreadsheet file to read; required.
  @NonNull private String filePath;
  // \n will be used by default if not specified.
  private String separatorForEachLine;
  // Spreadsheet dialect; defaults to CSV handling when unset.
  private SpreadSheetFormat spreadSheetFormat;

  /** Returns the per-line separator, or empty when the default ("\n") should apply. */
  public Optional<String> getSeparatorForEachLine() {
    return Optional.ofNullable(separatorForEachLine);
  }

  /** Returns the configured format, or empty when none was set. */
  public Optional<SpreadSheetFormat> getSpreadSheetFormat() {
    return Optional.ofNullable(spreadSheetFormat);
  }

  /** Supported spreadsheet dialects. */
  public enum SpreadSheetFormat {
    CSV,
    EXCEL,
  }
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector/spreadsheet/SpreadsheetReadException.java
|
package ai.knowly.langtorch.connector.spreadsheet;
/** Exception thrown when a Spreadsheet file cannot be read. */
public class SpreadsheetReadException extends RuntimeException {
  /** Wraps the underlying cause of the failed read. */
  public SpreadsheetReadException(Exception cause) {
    super(cause);
  }
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector/sql/ConnectionDetail.java
|
package ai.knowly.langtorch.connector.sql;
import java.util.Optional;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
/** Credentials and endpoint for establishing a new database connection. */
@Data
@AllArgsConstructor(access = lombok.AccessLevel.PRIVATE)
@Builder(toBuilder = true, setterPrefix = "set")
public class ConnectionDetail {
  // JDBC connection URL; may be null.
  private String url;
  // Database user name; may be null.
  private String user;
  // Database password; may be null.
  private String password;

  /** Returns the JDBC URL, or empty when none was set. */
  public Optional<String> getUrl() {
    return Optional.ofNullable(url);
  }

  /** Returns the user name, or empty when none was set. */
  public Optional<String> getUser() {
    return Optional.ofNullable(user);
  }

  /** Returns the password, or empty when none was set. */
  public Optional<String> getPassword() {
    return Optional.ofNullable(password);
  }
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector/sql/MySQLConnector.java
|
package ai.knowly.langtorch.connector.sql;
import static ai.knowly.langtorch.connector.sql.ResultSetTransform.transform;
import ai.knowly.langtorch.connector.Connector;
import com.google.common.flogger.FluentLogger;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Optional;
import javax.inject.Inject;
import lombok.NonNull;
/** MySQL loader. */
public class MySQLConnector implements Connector<Records> {
  private static final FluentLogger logger = FluentLogger.forEnclosingClass();

  // Carries the query plus either an existing Connection or a ConnectionDetail.
  private final SQLConnectorOption readOption;

  @Inject
  public MySQLConnector(@NonNull SQLConnectorOption readOption) {
    this.readOption = readOption;
  }

  /**
   * Executes the configured query and returns the resulting rows, or empty when no connection
   * could be obtained or the query fails.
   */
  @Override
  public Optional<Records> read() {
    Optional<Connection> newConnection = openConnection(readOption);
    if (!newConnection.isPresent()) {
      logger.atSevere().log("Fail to open connection to MySQL Database");
      return Optional.empty();
    }
    // Closing the Statement also closes its ResultSet per the JDBC contract.
    try (Statement stmt = newConnection.get().createStatement()) {
      ResultSet resultSet = stmt.executeQuery(readOption.getQuery());
      return Optional.ofNullable(transform(resultSet));
    } catch (SQLException e) {
      logger.atSevere().withCause(e).log("Error executing query in the MySQL Database");
      return Optional.empty();
    }
  }

  /**
   * Returns an open connection: the one supplied directly in the option when present, otherwise a
   * new one built from the ConnectionDetail when url/user/password are all present; empty when
   * neither source is usable.
   *
   * @throws SQLExecutionException when establishing a new connection fails
   */
  private Optional<Connection> openConnection(SQLConnectorOption readOption) {
    if (readOption.getConnection().isPresent()) {
      return readOption.getConnection();
    }
    // BUG FIX: this check was previously negated ("!isEligibleForConnection"), so a fully
    // specified ConnectionDetail was ignored (returning empty) while an incomplete one caused
    // NoSuchElementException on the .get() calls below.
    if (isEligibleForConnection(readOption)) {
      try {
        return Optional.of(
            DriverManager.getConnection(
                readOption.getConnectionDetail().get().getUrl().get(),
                readOption.getConnectionDetail().get().getUser().get(),
                readOption.getConnectionDetail().get().getPassword().get()));
      } catch (SQLException e) {
        logger.atSevere().withCause(e).log("Fail to establish a new connection to database");
        throw new SQLExecutionException("Fail to establish a new connection to database", e);
      }
    }
    return Optional.empty();
  }

  // True when the option carries a ConnectionDetail with url, user and password all present.
  private boolean isEligibleForConnection(SQLConnectorOption readOption) {
    return readOption.getConnectionDetail().isPresent()
        && readOption.getConnectionDetail().get().getUrl().isPresent()
        && readOption.getConnectionDetail().get().getUser().isPresent()
        && readOption.getConnectionDetail().get().getPassword().isPresent();
  }
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector/sql/Record.java
|
package ai.knowly.langtorch.connector.sql;
import java.util.Map;
import java.util.Optional;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.Singular;
/** One result-set row: a map from column name to column value. */
@Data
@AllArgsConstructor(access = lombok.AccessLevel.PRIVATE)
@Builder(toBuilder = true, setterPrefix = "set")
public class Record {
  // Column name -> value; values may be null for SQL NULL (see ResultSetTransform, which
  // stores rs.getObject(i) verbatim).
  @Singular("column")
  Map<String, Object> columns;

  /**
   * Returns the value of the named column, or empty when the column is absent or its value is SQL
   * NULL.
   *
   * <p>BUG FIX: the previous implementation used Optional.of on the looked-up value, which threw
   * NullPointerException for a present column whose value was NULL.
   */
  public Optional<Object> getColumn(String columnName) {
    return Optional.ofNullable(columns.get(columnName));
  }
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector/sql/Records.java
|
package ai.knowly.langtorch.connector.sql;
import java.util.List;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.Singular;
/** A list of result-set rows produced by a SQL query. */
@Data
@AllArgsConstructor(access = lombok.AccessLevel.PRIVATE)
@Builder(toBuilder = true, setterPrefix = "set")
public class Records {
  // One Record per row, in result-set order.
  @Singular("row")
  List<Record> rows;
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector/sql/ResultSetTransform.java
|
package ai.knowly.langtorch.connector.sql;
import ai.knowly.langtorch.connector.sql.Record.RecordBuilder;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import lombok.AllArgsConstructor;
/** Function to transform a ResultSet into a StorageObject. */
@AllArgsConstructor(access = lombok.AccessLevel.PRIVATE)
public class ResultSetTransform {
  /**
   * Converts every remaining row of the given ResultSet into a {@link Records} container, keying
   * each value by its column name.
   *
   * @throws SQLException when reading the result set or its metadata fails
   */
  public static Records transform(ResultSet rs) throws SQLException {
    ResultSetMetaData metaData = rs.getMetaData();
    int columnCount = metaData.getColumnCount();
    List<Record> rows = new ArrayList<>();
    while (rs.next()) {
      RecordBuilder row = Record.builder();
      for (int column = 1; column <= columnCount; column++) {
        row.setColumn(metaData.getColumnName(column), rs.getObject(column));
      }
      rows.add(row.build());
    }
    return Records.builder().setRows(rows).build();
  }
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector/sql/SQLConnectorOption.java
|
package ai.knowly.langtorch.connector.sql;
import ai.knowly.langtorch.connector.ConnectorOption;
import java.sql.Connection;
import java.util.Optional;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NonNull;
/** Read option for SQL connectors: the query plus a connection or connection details. */
@Data
@Builder(toBuilder = true, setterPrefix = "set")
@AllArgsConstructor(access = lombok.AccessLevel.PRIVATE)
public class SQLConnectorOption {
  // SQL query to execute; required.
  @NonNull private String query;
  // Pre-established connection; takes precedence over connectionDetail in MySQLConnector.
  private Connection connection;
  // Credentials/URL used to open a new connection when no Connection is supplied.
  private ConnectionDetail connectionDetail;

  /** Returns the pre-established connection, or empty when none was set. */
  public Optional<Connection> getConnection() {
    return Optional.ofNullable(connection);
  }

  /** Returns the connection detail, or empty when none was set. */
  public Optional<ConnectionDetail> getConnectionDetail() {
    return Optional.ofNullable(connectionDetail);
  }
}
|
0
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector
|
java-sources/ai/knowly/langtorch/0.0.17/ai/knowly/langtorch/connector/sql/SQLExecutionException.java
|
package ai.knowly.langtorch.connector.sql;
/** Exception thrown when an error occurs during SQL execution. */
public class SQLExecutionException extends RuntimeException {
  /** Creates the exception with a description and the underlying SQL failure. */
  public SQLExecutionException(String message, Throwable cause) {
    super(message, cause);
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.