index
int64 | repo_id
string | file_path
string | content
string |
|---|---|---|---|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/ai/idylnlp/opennlp/custom
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/ai/idylnlp/opennlp/custom/model/DictionaryModel.java
|
/*******************************************************************************
* Copyright 2018 Mountain Fog, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package ai.idylnlp.opennlp.custom.model;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

import org.apache.commons.lang3.StringUtils;

import ai.idylnlp.model.manifest.StandardModelManifest;
import ai.idylnlp.opennlp.custom.encryption.OpenNLPEncryptionFactory;

import opennlp.tools.dictionary.Dictionary;
import opennlp.tools.namefind.DictionaryNameFinder;
import opennlp.tools.namefind.TokenNameFinder;
import opennlp.tools.tokenize.Tokenizer;
import opennlp.tools.util.StringList;
import opennlp.tools.util.model.BaseModel;
/**
* A model that uses a dictionary to identify entities.
*
* @author Mountain Fog, Inc.
*
*/
public class DictionaryModel extends BaseModel {

  private static final long serialVersionUID = 1L;

  // Manifest describing this model (model ID, file name, entity type, encryption key).
  private final StandardModelManifest modelManifest;

  // Directory on the local file system that contains the dictionary file.
  private final String modelDirectory;

  /**
   * Creates a new dictionary model and initializes the {@link TokenNameFinder}.
   * @param modelManifest The {@link StandardModelManifest}.
   * @param modelDirectory The model directory.
   * @throws IOException
   */
  public DictionaryModel(StandardModelManifest modelManifest, String modelDirectory) throws Exception {
    this.modelManifest = modelManifest;
    this.modelDirectory = modelDirectory;
  }

  @Override
  public String getModelId() {
    return modelManifest.getModelId();
  }

  /**
   * Gets the configured {@link TokenNameFinder} for the dictionary model.
   * Each line of the dictionary file is (optionally) decrypted, tokenized,
   * and added to the dictionary as a single entry.
   * @param tokenizer A {@link Tokenizer} used to tokenize each line in the dictionary file.
   * @return A {@link TokenNameFinder}.
   * @throws Exception Thrown if the dictionary file cannot be read or decrypted.
   */
  public TokenNameFinder getDictionaryNameFinder(Tokenizer tokenizer) throws Exception {

    final boolean caseSensitive = false;
    final Dictionary dictionary = new Dictionary(caseSensitive);

    final File modelFile = new File(modelDirectory + File.separator + modelManifest.getModelFileName());

    // BUGFIX: read with an explicit charset. A bare FileReader uses the platform
    // default encoding and can silently mis-read the dictionary on some systems.
    try (BufferedReader br = new BufferedReader(
        new InputStreamReader(new FileInputStream(modelFile), StandardCharsets.UTF_8))) {

      String line;

      while ((line = br.readLine()) != null) {

        // When the manifest carries an encryption key, each line is stored encrypted.
        if(!StringUtils.isEmpty(modelManifest.getEncryptionKey())) {
          line = OpenNLPEncryptionFactory.getDefault().decrypt(line, modelManifest.getEncryptionKey());
        }

        final String[] tokenized = tokenizer.tokenize(line);

        // Each dictionary entry is the full tokenized line.
        dictionary.put(new StringList(tokenized));

      }

    }

    return new DictionaryNameFinder(dictionary, modelManifest.getType());

  }

}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/ai/idylnlp/opennlp/custom
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/ai/idylnlp/opennlp/custom/modelloader/LocalModelLoader.java
|
/*******************************************************************************
* Copyright 2018 Mountain Fog, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package ai.idylnlp.opennlp.custom.modelloader;
import java.io.File;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import ai.idylnlp.model.ModelValidator;
import ai.idylnlp.zoo.IdylNLPModelZoo;
import opennlp.tools.util.model.BaseModel;
/**
* A model loader for using models from the local file system.
* @author Mountain Fog, Inc.
*
*/
public final class LocalModelLoader<T extends BaseModel> extends ModelLoader<T> {

  private static final Logger LOGGER = LogManager.getLogger(LocalModelLoader.class);

  /**
   * Creates a local model loader.
   * @param modelValidator A {@link ModelValidator} to validate the model prior to loading.
   * @param modelDirectory The directory on the local file system that contains the models.
   */
  public LocalModelLoader(ModelValidator modelValidator, String modelDirectory) {

    super(modelValidator);

    // Normalize the directory so it always ends with the platform separator.
    final String normalizedDirectory = modelDirectory.endsWith(File.separator)
        ? modelDirectory
        : modelDirectory + File.separator;

    LOGGER.info("Using local model loader directory {}", normalizedDirectory);

    super.setModelDirectory(normalizedDirectory);

  }

  /**
   * Creates a local model loader.
   * @param modelValidator A {@link ModelValidator} to validate the model prior to loading.
   * @param modelDirectory The directory on the local file system that contains the models.
   * @param idylNlpModelZoo A {@link IdylNLPModelZoo} client.
   */
  public LocalModelLoader(ModelValidator modelValidator, String modelDirectory, IdylNLPModelZoo idylNlpModelZoo) {

    // Delegate directory normalization to the two-argument constructor.
    this(modelValidator, modelDirectory);

    super.setIdylNLPModelZoo(idylNlpModelZoo);

  }

}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/ai/idylnlp/opennlp/custom
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/ai/idylnlp/opennlp/custom/modelloader/ModelLoader.java
|
/*******************************************************************************
* Copyright 2018 Mountain Fog, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package ai.idylnlp.opennlp.custom.modelloader;
import java.io.File;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import ai.idylnlp.model.ModelValidator;
import ai.idylnlp.model.exceptions.ModelLoaderException;
import ai.idylnlp.model.exceptions.ValidationException;
import ai.idylnlp.model.manifest.StandardModelManifest;
import ai.idylnlp.opennlp.custom.encryption.OpenNLPEncryptionFactory;
import ai.idylnlp.opennlp.custom.model.DictionaryModel;
import ai.idylnlp.zoo.IdylNLPModelZoo;
import opennlp.tools.cmdline.namefind.TokenNameFinderModelLoader;
import opennlp.tools.lemmatizer.LemmatizerModel;
import opennlp.tools.namefind.TokenNameFinderModel;
import opennlp.tools.postag.POSModel;
import opennlp.tools.sentdetect.SentenceModel;
import opennlp.tools.tokenize.TokenizerModel;
import opennlp.tools.util.model.BaseModel;
/**
* Abstract superclass for model loaders. Extend this class to implement
* custom model loaders to support different environment needs.
* @author Mountain Fog, Inc.
* @param <T> Class extending {@link BaseModel}.
*
*/
public abstract class ModelLoader<T extends BaseModel> {

  private static final Logger LOGGER = LogManager.getLogger(ModelLoader.class);

  // This map holds models for all types - tokenizer, sentences, and entity models.
  private Map<StandardModelManifest, T> models = new HashMap<StandardModelManifest, T>();

  // Directory containing the model files; always set via setModelDirectory().
  private String modelDirectory;

  // The class of the model being requested; set on each getModel() call.
  private Class<T> typeParameterClass;

  private ModelValidator modelValidator;

  // Optional client for downloading missing models; may be null.
  private IdylNLPModelZoo idylNlpModelZoo;

  /**
   * Creates a model loader.
   * @param modelValidator A {@link ModelValidator} to validate models prior to loading.
   */
  public ModelLoader(ModelValidator modelValidator) {
    this.modelValidator = modelValidator;
  }

  /**
   * Gets the model. Models are cached after the first successful load and
   * returned from the cache on subsequent calls with the same manifest.
   * @param modelManifest The model's {@link StandardModelManifest manifest}.
   * @param typeParameterClass The class of the model.
   * @return A class extending {@link BaseModel}.
   * @throws ModelLoaderException Thrown if the model can not be loaded. This will happen
   * in cases where the model does not exist, the model is corrupted, or the model file can not
   * be read by the process.
   */
  public T getModel(StandardModelManifest modelManifest, Class<T> typeParameterClass) throws ModelLoaderException {

    this.typeParameterClass = typeParameterClass;

    T tnfm = models.get(modelManifest);

    if(tnfm == null) {

      LOGGER.debug("Model has not been loaded - going to load.");

      try {
        // We need to load this model first.
        tnfm = loadModel(modelManifest);
      } catch (Exception ex) {
        LOGGER.error("Unable to load model: " + modelManifest.getModelFileName(), ex);
        throw new ModelLoaderException("Unable to load model: " + modelManifest.getModelFileName(), ex);
      }

      if(tnfm == null) {
        // loadModel() returns null when the model class is unsupported or validation fails.
        throw new ModelLoaderException("Unable to load model: " + modelManifest.getModelFileName());
      }

      // Cache the loaded model for future requests.
      models.put(modelManifest, tnfm);

    }

    return tnfm;

  }

  /**
   * Loads the model described by the manifest. If the model file does not exist
   * locally and a zoo client is configured, an attempt is made to download it first.
   * @param modelManifest The model's {@link StandardModelManifest manifest}.
   * @return The loaded model, or <code>null</code> if it could not be loaded.
   * @throws Exception Thrown if the model cannot be read.
   */
  private T loadModel(StandardModelManifest modelManifest) throws Exception {

    final String fullModelFileName = modelDirectory + modelManifest.getModelFileName();

    LOGGER.debug("Loading model from: " + fullModelFileName);

    // Does this file exist?
    final File modelFile = new File(fullModelFileName);

    if(!modelFile.exists() && idylNlpModelZoo != null) {
      LOGGER.info("Attempting to download model {} from the Idyl NLP zoo.", modelManifest.getModelId());
      // Try to download the model from the zoo.
      idylNlpModelZoo.downloadModel(modelManifest.getModelId(), modelFile);
    }

    // Model loading will always take a decent amount of time
    // so milliseconds is a decent measure here.
    final long startTime = System.currentTimeMillis();

    final T tnfm = loadModelFromDisk(modelManifest, modelFile);

    final long duration = System.currentTimeMillis() - startTime;

    LOGGER.debug("Model(s) loaded in " + duration + " milliseconds.");

    return tnfm;

  }

  /**
   * Load the model from the given file based on the requested model class.
   * @param modelManifest The model's {@link StandardModelManifest manifest}.
   * @param modelFile The model {@link File} on disk.
   * @return The loaded model, or <code>null</code> if the model class is
   * unsupported or validation fails.
   * @throws Exception Thrown if the model cannot be read.
   */
  @SuppressWarnings("unchecked")
  private T loadModelFromDisk(StandardModelManifest modelManifest, File modelFile) throws Exception {

    LOGGER.debug("Loading model from disk: " + modelFile.getAbsolutePath());

    OpenNLPEncryptionFactory.getDefault().setKey(modelManifest.getEncryptionKey());

    T model = null;

    try {

      // Load the model into memory based on the type.
      if(typeParameterClass.isAssignableFrom(TokenNameFinderModel.class)) {

        // Load a token name finder model.
        model = (T) new TokenNameFinderModelLoader().load(modelFile);

      } else if(typeParameterClass.isAssignableFrom(SentenceModel.class)) {

        // Load a sentence model.
        model = (T) new SentenceModel(modelFile);

      } else if(typeParameterClass.isAssignableFrom(TokenizerModel.class)) {

        // Load a tokenizer model.
        model = (T) new TokenizerModel(modelFile);

      } else if(typeParameterClass.isAssignableFrom(POSModel.class)) {

        // Load a part-of-speech model.
        model = (T) new POSModel(modelFile);

      } else if(typeParameterClass.isAssignableFrom(LemmatizerModel.class)) {

        // Load a lemmatizer model.
        model = (T) new LemmatizerModel(modelFile);

      } else if(typeParameterClass.isAssignableFrom(DictionaryModel.class)) {

        // Load a dictionary model.
        model = (T) new DictionaryModel(modelManifest, this.getModelDirectory());

      } else {

        LOGGER.warn("Invalid class of model: {}", typeParameterClass.toString());

      }

      // BUGFIX: guard against a null model. The original dereferenced
      // model.getModelId() unconditionally and threw an NPE when the
      // requested model class was unsupported.
      if(model != null) {

        // Make sure the model.id in the model matches the model.id in the manifest.
        if(StringUtils.equals(model.getModelId(), modelManifest.getModelId())) {

          if(modelValidator != null) {

            // Validate the model.
            if(!modelValidator.validate(modelManifest)) {

              LOGGER.warn("Version verification failed.");

              // Since version validation failed we will set the model to null.
              model = null;

            }

          } else {

            // Even though the validator is null validation is allowed to be successful.
            LOGGER.warn("The model validator was null.");

          }

        } else {

          // NOTE(review): a manifest/model ID mismatch only logs a warning and the
          // model is still returned — preserved from the original; confirm intended.
          LOGGER.warn("The model manifest for model {} is not valid.", modelManifest.getModelFileName());

        }

      }

    } catch (ValidationException ex) {

      LOGGER.error("Idyl NLP license key validation failed loading model.", ex);

      model = null;

    } finally {

      // BUGFIX: always clear the encryption key, even when loading or validation
      // throws. The original skipped clearKey() on exception paths, leaving the
      // key set in the encryption factory.
      OpenNLPEncryptionFactory.getDefault().clearKey();

    }

    return model;

  }

  /**
   * Gets the directory containing the models.
   * @return The directory containing the models.
   */
  public String getModelDirectory() {
    return modelDirectory;
  }

  /**
   * Sets the model directory.
   * @param modelDirectory The directory containing the models. This should be a
   * directory on the local file system.
   */
  public void setModelDirectory(String modelDirectory) {
    this.modelDirectory = modelDirectory;
  }

  /**
   * Sets the {@link IdylNLPModelZoo} client.
   * @param idylNLPModelZoo A {@link IdylNLPModelZoo client}.
   */
  public void setIdylNLPModelZoo(IdylNLPModelZoo idylNLPModelZoo) {
    this.idylNlpModelZoo = idylNLPModelZoo;
  }

  /**
   * Gets the map of models to file names.
   * @return A map of models to file names.
   */
  public Map<StandardModelManifest, T> getModels() {
    return models;
  }

  /**
   * Sets the model map.
   * @param modelMap The model map which is a map of
   * models to file names.
   */
  public void setModels(Map<StandardModelManifest, T> modelMap) {
    this.models = modelMap;
  }

}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/ai/idylnlp/opennlp/custom/nlp
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/ai/idylnlp/opennlp/custom/nlp/lemmatization/DefaultLemmatizer.java
|
/*******************************************************************************
* Copyright 2018 Mountain Fog, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package ai.idylnlp.opennlp.custom.nlp.lemmatization;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import ai.idylnlp.model.ModelValidator;
import ai.idylnlp.model.exceptions.ModelLoaderException;
import ai.idylnlp.model.manifest.StandardModelManifest;
import ai.idylnlp.model.nlp.lemma.Lemmatizer;
import ai.idylnlp.opennlp.custom.modelloader.LocalModelLoader;
import opennlp.tools.lemmatizer.DictionaryLemmatizer;
import opennlp.tools.lemmatizer.LemmatizerME;
import opennlp.tools.lemmatizer.LemmatizerModel;
/**
* Default implementation of {@link Lemmatizer} that uses OpenNLP's
* lemmatizing capabilities to provide dictionary-based and
* model-based lemmatization.
*
* @author Mountain Fog, Inc.
*
*/
public class DefaultLemmatizer implements Lemmatizer {

  private static final Logger LOGGER = LogManager.getLogger(DefaultLemmatizer.class);

  // Either a DictionaryLemmatizer or a LemmatizerME, depending on the constructor used.
  private opennlp.tools.lemmatizer.Lemmatizer lemmatizer;

  // True when the lemmatizer is model-based (LemmatizerME); model-based lemmas
  // must be decoded after prediction.
  private boolean isModelBased;

  /**
   * Creates a new lemmatizer that uses a dictionary.
   * @param dictionary The full path to the dictionary file.
   * @throws IOException Thrown if the dictionary cannot be opened.
   */
  public DefaultLemmatizer(String dictionary) throws IOException {

    isModelBased = false;

    // BUGFIX: try-with-resources guarantees the stream is closed even if the
    // DictionaryLemmatizer constructor throws; the original leaked it on error.
    try (InputStream dictLemmatizer = new FileInputStream(dictionary)) {
      lemmatizer = new DictionaryLemmatizer(dictLemmatizer);
    }

  }

  /**
   * Creates a new model-based lemmatizer.
   * @param modelPath The full path to the directory containing the model.
   * @param modelManifest The {@link StandardModelManifest manifest} of the lemmatizer model.
   * @param validator The {@link ModelValidator} used to validate the model.
   * @throws ModelLoaderException Thrown if the model cannot be loaded.
   */
  public DefaultLemmatizer(String modelPath, StandardModelManifest modelManifest, ModelValidator validator) throws ModelLoaderException {

    isModelBased = true;

    LocalModelLoader<LemmatizerModel> lemmaModelLoader = new LocalModelLoader<LemmatizerModel>(validator, modelPath);
    LemmatizerModel model = lemmaModelLoader.getModel(modelManifest, LemmatizerModel.class);

    lemmatizer = new LemmatizerME(model);

  }

  /**
   * {@inheritDoc}
   * <p>
   * How the lemmatization is performed depends on which constructor
   * was used to create the class. The lemmatization could be
   * dictionary-based or model-based.
   */
  @Override
  public String[] lemmatize(String[] tokens, String[] posTags) {

    String[] lemmas = lemmatizer.lemmatize(tokens, posTags);

    if(isModelBased) {
      // Must call decodeLemmas for model-based lemmatization.
      lemmas = ((LemmatizerME) lemmatizer).decodeLemmas(tokens, lemmas);
    }

    return lemmas;

  }

}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/ai/idylnlp/opennlp/custom/nlp
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/ai/idylnlp/opennlp/custom/nlp/pos/DefaultPartsOfSpeechTagger.java
|
/*******************************************************************************
* Copyright 2018 Mountain Fog, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package ai.idylnlp.opennlp.custom.nlp.pos;
import java.util.LinkedList;
import java.util.List;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import ai.idylnlp.model.ModelValidator;
import ai.idylnlp.model.exceptions.ModelLoaderException;
import ai.idylnlp.model.manifest.StandardModelManifest;
import ai.idylnlp.model.nlp.SentenceDetector;
import ai.idylnlp.model.nlp.Tokenizer;
import ai.idylnlp.model.nlp.pos.PartsOfSpeechTagger;
import ai.idylnlp.model.nlp.pos.PartsOfSpeechToken;
import ai.idylnlp.opennlp.custom.modelloader.LocalModelLoader;
import opennlp.tools.postag.POSModel;
import opennlp.tools.postag.POSTaggerME;
/**
* A part of speech (POS) tagger that uses OpenNLP's tagging capabilities.
*
* @author Mountain Fog, Inc.
*
*/
public class DefaultPartsOfSpeechTagger implements PartsOfSpeechTagger {

  private static final Logger LOGGER = LogManager.getLogger(DefaultPartsOfSpeechTagger.class);

  private POSTaggerME tagger;

  /**
   * Creates a tagger.
   * @param modelPath The full path to the directory containing the part of speech model.
   * @param modelManifest The {@link StandardModelManifest manifest} of the part of speech model.
   * @param validator The {@link ModelValidator} used to validate the model.
   * @throws ModelLoaderException Thrown if the model cannot be loaded.
   */
  public DefaultPartsOfSpeechTagger(String modelPath, StandardModelManifest modelManifest, ModelValidator validator) throws ModelLoaderException {

    LocalModelLoader<POSModel> posModelLoader = new LocalModelLoader<POSModel>(validator, modelPath);
    POSModel model = posModelLoader.getModel(modelManifest, POSModel.class);

    tagger = new POSTaggerME(model);

  }

  /**
   * {@inheritDoc}
   */
  @Override
  public List<PartsOfSpeechToken> tag(String input, SentenceDetector sentenceDetector, Tokenizer tokenizer) {

    // Split the input into sentences and delegate to the sentence-array overload.
    String[] sentences = sentenceDetector.sentDetect(input);

    return tag(sentences, tokenizer);

  }

  /**
   * {@inheritDoc}
   */
  @Override
  public List<PartsOfSpeechToken> tag(String[] sentences, Tokenizer tokenizer) {

    List<PartsOfSpeechToken> partsOfSpeechTokens = new LinkedList<PartsOfSpeechToken>();

    // Tokenize each sentence and delegate to the token-array overload.
    for (String sentence : sentences) {
      partsOfSpeechTokens.addAll(tag(tokenizer.tokenize(sentence)));
    }

    return partsOfSpeechTokens;

  }

  /**
   * {@inheritDoc}
   */
  @Override
  public List<PartsOfSpeechToken> tag(String[] tokenizedSentence) {

    // Shared implementation: tag the tokens and pair each token with its tag.
    // (The original duplicated this loop in all three overloads.)
    List<PartsOfSpeechToken> partsOfSpeechTokens = new LinkedList<PartsOfSpeechToken>();

    String[] tags = tagger.tag(tokenizedSentence);

    for (int i = 0; i < tokenizedSentence.length; i++) {

      final String token = tokenizedSentence[i].trim();
      final String tag = tags[i].trim();

      partsOfSpeechTokens.add(new PartsOfSpeechToken(token, tag));

    }

    return partsOfSpeechTokens;

  }

}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/ai/idylnlp/opennlp/custom
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/ai/idylnlp/opennlp/custom/utils/SpansToSpans.java
|
/*******************************************************************************
* Copyright 2018 Mountain Fog, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package ai.idylnlp.opennlp.custom.utils;
import java.util.LinkedList;
import java.util.List;
import opennlp.tools.util.Span;
/**
* Utility class for converting between OpenNLP's Span and Idyl NLP's Span.
*
* @author Mountain Fog, Inc.
*
*/
public class SpansToSpans {

  // Utility class; not instantiable.
  private SpansToSpans() {
  }

  /**
   * Converts an array of OpenNLP Spans to Idyl SDK Spans.
   * @param spans An array of OpenNLP Spans.
   * @return An array of Idyl SDK Spans, in the same order as the input.
   */
  public static ai.idylnlp.model.nlp.Span[] toSpans(Span[] spans) {

    final ai.idylnlp.model.nlp.Span[] converted = new ai.idylnlp.model.nlp.Span[spans.length];

    // Copy start/end offsets, type, and probability for each span.
    for (int i = 0; i < spans.length; i++) {
      final opennlp.tools.util.Span span = spans[i];
      converted[i] = new ai.idylnlp.model.nlp.Span(
          span.getStart(), span.getEnd(), span.getType(), span.getProb());
    }

    return converted;

  }

}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/ai/idylnlp/opennlp/custom
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/ai/idylnlp/opennlp/custom/validators/TrueModelValidator.java
|
/*******************************************************************************
* Copyright 2018 Mountain Fog, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package ai.idylnlp.opennlp.custom.validators;
import ai.idylnlp.model.ModelValidator;
import ai.idylnlp.model.exceptions.ValidationException;
import ai.idylnlp.model.manifest.ModelManifest;
/**
* A model validator that always returns true. Useful for testing or
* when no validation is needed.
*
* @author Mountain Fog, Inc.
*
*/
public class TrueModelValidator implements ModelValidator {

  /**
   * {@inheritDoc}
   * <p>
   * Always reports the manifest as valid; no checks are performed.
   */
  @Override
  public boolean validate(ModelManifest manifest) throws ValidationException {
    // Unconditional pass: this validator never rejects a model.
    return true;
  }

}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/package-info.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Contains packages which solve common NLP tasks.
*/
package opennlp.tools;
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/chunker/ChunkSample.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.chunker;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import opennlp.tools.util.Span;
/**
* Class for holding chunks for a single unit of text.
*/
public class ChunkSample implements Serializable {
private final List<String> sentence;
private final List<String> tags;
private final List<String> preds;
/**
* Initializes the current instance.
*
* @param sentence
* training sentence
* @param tags
* POS Tags for the sentence
* @param preds
* Chunk tags in B-* I-* notation
*/
public ChunkSample(String[] sentence, String[] tags, String[] preds) {
validateArguments(sentence.length, tags.length, preds.length);
this.sentence = Collections.unmodifiableList(new ArrayList<>(Arrays.asList(sentence)));
this.tags = Collections.unmodifiableList(new ArrayList<>(Arrays.asList(tags)));
this.preds = Collections.unmodifiableList(new ArrayList<>(Arrays.asList(preds)));
}
/**
* Initializes the current instance.
*
* @param sentence
* training sentence
* @param tags
* POS Tags for the sentence
* @param preds
* Chunk tags in B-* I-* notation
*/
public ChunkSample(List<String> sentence, List<String> tags, List<String> preds) {
validateArguments(sentence.size(), tags.size(), preds.size());
this.sentence = Collections.unmodifiableList(new ArrayList<>(sentence));
this.tags = Collections.unmodifiableList(new ArrayList<>(tags));
this.preds = Collections.unmodifiableList(new ArrayList<>(preds));
}
/** Gets the training sentence */
public String[] getSentence() {
return sentence.toArray(new String[sentence.size()]);
}
/** Gets the POS Tags for the sentence */
public String[] getTags() {
return tags.toArray(new String[tags.size()]);
}
/** Gets the Chunk tags in B-* I-* notation */
public String[] getPreds() {
return preds.toArray(new String[preds.size()]);
}
/** Gets the phrases as an array of spans */
public Span[] getPhrasesAsSpanList() {
return phrasesAsSpanList(getSentence(), getTags(), getPreds());
}
/**
* Static method to create arrays of spans of phrases
*
* @param aSentence
* training sentence
* @param aTags
* POS Tags for the sentence
* @param aPreds
* Chunk tags in B-* I-* notation
*
* @return the phrases as an array of spans
*/
public static Span[] phrasesAsSpanList(String[] aSentence, String[] aTags,
String[] aPreds) {
validateArguments(aSentence.length, aTags.length, aPreds.length);
// initialize with the list maximum size
List<Span> phrases = new ArrayList<>(aSentence.length);
String startTag = "";
int startIndex = 0;
boolean foundPhrase = false;
for (int ci = 0, cn = aPreds.length; ci < cn; ci++) {
String pred = aPreds[ci];
if (pred.startsWith("B-")
|| !pred.equals("I-" + startTag) && !pred.equals("O")) { // start
if (foundPhrase) { // handle the last
phrases.add(new Span(startIndex, ci, startTag));
}
startIndex = ci;
startTag = pred.substring(2);
foundPhrase = true;
} else if (pred.equals("I-" + startTag)) { // middle
// do nothing
} else if (foundPhrase) { // end
phrases.add(new Span(startIndex, ci, startTag));
foundPhrase = false;
startTag = "";
}
}
if (foundPhrase) { // leftover
phrases.add(new Span(startIndex, aPreds.length, startTag));
}
return phrases.toArray(new Span[phrases.size()]);
}
private static void validateArguments(int sentenceSize, int tagsSize, int predsSize)
throws IllegalArgumentException {
if (sentenceSize != tagsSize || tagsSize != predsSize)
throw new IllegalArgumentException(
"All arrays must have the same length: " +
"sentenceSize: " + sentenceSize +
", tagsSize: " + tagsSize +
", predsSize: " + predsSize + "!");
}
/**
* Creates a nice to read string for the phrases formatted as following: <br>
* <code>
* [NP Rockwell_NNP ] [VP said_VBD ] [NP the_DT agreement_NN ] [VP calls_VBZ ] [SBAR for_IN ]
* [NP it_PRP ] [VP to_TO supply_VB ] [NP 200_CD additional_JJ so-called_JJ shipsets_NNS ]
* [PP for_IN ] [NP the_DT planes_NNS ] ._.
* </code>
*
* @return a nice to read string representation of the chunk phases
*/
public String nicePrint() {
Span[] spans = getPhrasesAsSpanList();
StringBuilder result = new StringBuilder(" ");
for (int tokenIndex = 0; tokenIndex < sentence.size(); tokenIndex++) {
for (Span span : spans) {
if (span.getStart() == tokenIndex) {
result.append("[").append(span.getType()).append(" ");
}
if (span.getEnd() == tokenIndex) {
result.append("]").append(' ');
}
}
result.append(sentence.get(tokenIndex)).append("_").append(tags.get(tokenIndex)).append(' ');
}
if (sentence.size() > 1)
result.setLength(result.length() - 1);
for (Span span : spans) {
if (span.getEnd() == sentence.size()) {
result.append(']');
}
}
return result.toString();
}
@Override
public String toString() {
StringBuilder chunkString = new StringBuilder();
for (int ci = 0; ci < preds.size(); ci++) {
chunkString.append(sentence.get(ci)).append(" ").append(tags.get(ci))
.append(" ").append(preds.get(ci)).append("\n");
}
return chunkString.toString();
}
/**
 * Hash code derived from the token, tag and prediction arrays,
 * consistent with {@link #equals(Object)}.
 */
@Override
public int hashCode() {
  int sentenceHash = Arrays.hashCode(getSentence());
  int tagsHash = Arrays.hashCode(getTags());
  int predsHash = Arrays.hashCode(getPreds());
  return Objects.hash(sentenceHash, tagsHash, predsHash);
}
/**
 * Two samples are equal when their token, tag and prediction arrays are
 * element-wise equal.
 */
@Override
public boolean equals(Object obj) {
  if (this == obj) {
    return true;
  }
  if (!(obj instanceof ChunkSample)) {
    return false;
  }
  ChunkSample other = (ChunkSample) obj;
  return Arrays.equals(getSentence(), other.getSentence())
      && Arrays.equals(getTags(), other.getTags())
      && Arrays.equals(getPreds(), other.getPreds());
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/chunker/ChunkSampleSequenceStream.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.chunker;
import java.io.IOException;
import opennlp.tools.ml.model.AbstractModel;
import opennlp.tools.ml.model.Event;
import opennlp.tools.ml.model.Sequence;
import opennlp.tools.ml.model.SequenceStream;
import opennlp.tools.util.ObjectStream;
/**
 * A {@link SequenceStream} adapter that converts {@link ChunkSample}s into
 * {@link Sequence}s of training {@link Event}s, one event per token, for
 * sequence-based trainers.
 */
public class ChunkSampleSequenceStream implements SequenceStream {
  private final ObjectStream<ChunkSample> samples;
  private final ChunkerContextGenerator contextGenerator;
  /**
   * Creates the stream.
   *
   * @param samples the underlying chunk sample stream
   * @param contextGenerator generates the predictive context for each token
   */
  public ChunkSampleSequenceStream(ObjectStream<ChunkSample> samples,
      ChunkerContextGenerator contextGenerator) {
    this.samples = samples;
    this.contextGenerator = contextGenerator;
  }
  /**
   * Reads the next sample and converts it into a sequence of events.
   *
   * @return the next {@link Sequence}, or {@code null} when the underlying
   *     stream is exhausted
   */
  @Override
  public Sequence read() throws IOException {
    ChunkSample sample = samples.read();
    if (sample != null) {
      String[] sentence = sample.getSentence();
      String[] tags = sample.getTags();
      Event[] events = new Event[sentence.length];
      for (int i = 0; i < sentence.length; i++) {
        // it is safe to pass the tags as previous tags because
        // the context generator does not look for non predicted tags
        String[] context = contextGenerator.getContext(i, sentence, tags, null);
        // NOTE(review): the event outcome here is the POS tag (tags[i]),
        // not the chunk label from sample.getPreds() - confirm against
        // upstream OpenNLP before relying on sequence training via this
        // stream.
        events[i] = new Event(tags[i], context);
      }
      return new Sequence<>(events,sample);
    }
    return null;
  }
  @Override
  public Event[] updateContext(Sequence sequence, AbstractModel model) {
    // TODO: Should be implemented for Perceptron sequence learning ...
    return null;
  }
  /** Resets the underlying sample stream to its beginning. */
  @Override
  public void reset() throws IOException, UnsupportedOperationException {
    samples.reset();
  }
  /** Closes the underlying sample stream. */
  @Override
  public void close() throws IOException {
    samples.close();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/chunker/ChunkSampleStream.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.chunker;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import opennlp.tools.util.FilterObjectStream;
import opennlp.tools.util.ObjectStream;
/**
 * Parses the conll 2000 shared task shallow parser training data.
 * <p>
 * Data format is specified on the conll page:<br>
 * <a href="http://www.cnts.ua.ac.be/conll2000/chunking/">
 * http://www.cnts.ua.ac.be/conll2000/chunking/</a>
 */
public class ChunkSampleStream extends FilterObjectStream<String, ChunkSample> {

  /**
   * Initializes the current instance.
   *
   * @param samples a plain text line stream
   */
  public ChunkSampleStream(ObjectStream<String> samples) {
    super(samples);
  }

  /**
   * Reads one sentence worth of lines, terminated by an empty line or the
   * end of the stream. Each well-formed line contributes a token, a POS tag
   * and a chunk tag; malformed lines are reported to stderr and skipped.
   *
   * @return the next {@link ChunkSample}, or {@code null} when no tokens
   *     remain in the stream
   */
  public ChunkSample read() throws IOException {

    List<String> tokens = new ArrayList<>();
    List<String> posTags = new ArrayList<>();
    List<String> chunkTags = new ArrayList<>();

    String line = samples.read();
    while (line != null && !line.equals("")) {
      String[] fields = line.split(" ");
      if (fields.length == 3) {
        tokens.add(fields[0]);
        posTags.add(fields[1]);
        chunkTags.add(fields[2]);
      } else {
        System.err.println("Skipping corrupt line: " + line);
      }
      line = samples.read();
    }

    if (tokens.isEmpty()) {
      return null;
    }

    return new ChunkSample(tokens.toArray(new String[tokens.size()]),
        posTags.toArray(new String[posTags.size()]),
        chunkTags.toArray(new String[chunkTags.size()]));
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/chunker/Chunker.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.chunker;
import opennlp.tools.util.Sequence;
import opennlp.tools.util.Span;
/**
 * The interface for chunkers which provide chunk tags for a sequence of tokens.
 * All methods take the tokens and their POS tags as parallel arrays of equal
 * length.
 */
public interface Chunker {
  /**
   * Generates chunk tags for the given sequence returning the result in an array.
   *
   * @param toks an array of the tokens or words of the sequence.
   * @param tags an array of the pos tags of the sequence.
   *
   * @return an array of chunk tags, one for each token in the sequence.
   */
  String[] chunk(String[] toks, String[] tags);
  /**
   * Generates tagged chunk spans for the given sequence returning the result in a span array.
   *
   * @param toks an array of the tokens or words of the sequence.
   * @param tags an array of the pos tags of the sequence.
   *
   * @return an array of spans with chunk tags, one span per chunk in the sequence.
   */
  Span[] chunkAsSpans(String[] toks, String[] tags);
  /**
   * Returns the top k chunk sequences for the specified sentence with the specified pos-tags.
   *
   * @param sentence The tokens of the sentence.
   * @param tags The pos-tags for the specified sentence.
   *
   * @return the top k chunk sequences for the specified sentence.
   */
  Sequence[] topKSequences(String[] sentence, String[] tags);
  /**
   * Returns the top k chunk sequences for the specified sentence with the specified pos-tags.
   *
   * @param sentence The tokens of the sentence.
   * @param tags The pos-tags for the specified sentence.
   * @param minSequenceScore A lower bound on the score of a returned sequence.
   *
   * @return the top k chunk sequences for the specified sentence whose score
   *     is at least {@code minSequenceScore}.
   */
  Sequence[] topKSequences(String[] sentence, String[] tags, double minSequenceScore);
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/chunker/ChunkerContextGenerator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.chunker;
import opennlp.tools.util.BeamSearchContextGenerator;
import opennlp.tools.util.TokenTag;
/**
 * Interface for the context generator used in syntactic chunking.
 */
public interface ChunkerContextGenerator extends BeamSearchContextGenerator<TokenTag> {
  /**
   * Returns the contexts for chunking of the specified index.
   *
   * @param i The index of the token in the specified toks array for which the context should be constructed.
   * @param toks The tokens of the sentence. The <code>toString</code> methods of these objects
   *     should return the token text.
   * @param tags The POS tags for the specified tokens.
   * @param preds The previous decisions made in the tagging of this sequence.
   *     Only indices less than i will be examined.
   * @return An array of predictive contexts on which a model bases its decisions.
   */
  String[] getContext(int i, String[] toks, String[] tags, String[] preds);
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/chunker/ChunkerCrossValidator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.chunker;
import java.io.IOException;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.TrainingParameters;
import opennlp.tools.util.eval.CrossValidationPartitioner;
import opennlp.tools.util.eval.FMeasure;
/**
 * Cross validator for the chunker: for each fold it trains a
 * {@link ChunkerModel} on the training partition, evaluates it on the
 * held-out partition and merges the per-fold results into a single
 * {@link FMeasure}.
 */
public class ChunkerCrossValidator {

  private final String languageCode;
  private final TrainingParameters params;

  // Accumulates the merged evaluation results across all folds.
  private final FMeasure fmeasure = new FMeasure();
  private final ChunkerEvaluationMonitor[] listeners;
  private final ChunkerFactory chunkerFactory;

  /**
   * Creates a cross validator.
   *
   * @param languageCode the language code of the training data
   * @param params the machine learning training parameters
   * @param factory the factory providing the context generator and
   *     sequence validator used for training
   * @param listeners optional monitors notified during evaluation
   */
  public ChunkerCrossValidator(String languageCode, TrainingParameters params,
      ChunkerFactory factory, ChunkerEvaluationMonitor... listeners) {

    this.chunkerFactory = factory;
    this.languageCode = languageCode;
    this.params = params;
    this.listeners = listeners;
  }

  /**
   * Starts the evaluation.
   *
   * @param samples
   *          the data to train and test
   * @param nFolds
   *          number of folds
   *
   * @throws IOException if reading the samples or training a fold fails
   */
  public void evaluate(ObjectStream<ChunkSample> samples, int nFolds)
      throws IOException {
    CrossValidationPartitioner<ChunkSample> partitioner = new CrossValidationPartitioner<>(
        samples, nFolds);

    while (partitioner.hasNext()) {

      CrossValidationPartitioner.TrainingSampleStream<ChunkSample> trainingSampleStream = partitioner
          .next();

      // Train on this fold's training partition ...
      ChunkerModel model = ChunkerME.train(languageCode, trainingSampleStream,
          params, chunkerFactory);

      // ... and evaluate on the fold's held-out test partition.
      ChunkerEvaluator evaluator = new ChunkerEvaluator(new ChunkerME(model), listeners);
      evaluator.evaluate(trainingSampleStream.getTestSampleStream());

      fmeasure.mergeInto(evaluator.getFMeasure());
    }
  }

  /**
   * @return the F-measure merged across all evaluated folds
   */
  public FMeasure getFMeasure() {
    return fmeasure;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/chunker/ChunkerEvaluationMonitor.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.chunker;
import opennlp.tools.util.eval.EvaluationMonitor;
/**
 * Marker interface for monitors that observe {@link ChunkSample}
 * predictions during chunker evaluation.
 */
public interface ChunkerEvaluationMonitor extends EvaluationMonitor<ChunkSample> {
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/chunker/ChunkerEvaluator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.chunker;
import opennlp.tools.util.eval.Evaluator;
import opennlp.tools.util.eval.FMeasure;
/**
 * The {@link ChunkerEvaluator} measures the performance
 * of the given {@link Chunker} with the provided
 * reference {@link ChunkSample}s.
 *
 * @see Evaluator
 * @see Chunker
 * @see ChunkSample
 */
public class ChunkerEvaluator extends Evaluator<ChunkSample> {

  // Accumulates precision/recall scores over all processed samples.
  private final FMeasure fmeasure = new FMeasure();

  /**
   * The {@link Chunker} used to create the predicted
   * {@link ChunkSample} objects.
   */
  private final Chunker chunker;

  /**
   * Initializes the current instance with the given
   * {@link Chunker}.
   *
   * @param chunker the {@link Chunker} to evaluate.
   * @param listeners evaluation listeners
   */
  public ChunkerEvaluator(Chunker chunker, ChunkerEvaluationMonitor... listeners) {
    super(listeners);
    this.chunker = chunker;
  }

  /**
   * Evaluates the given reference {@link ChunkSample} object.
   *
   * This is done by finding the phrases with the
   * {@link Chunker} in the sentence from the reference
   * {@link ChunkSample}. The found phrases are then used to
   * calculate and update the scores.
   *
   * @param reference the reference {@link ChunkSample}.
   *
   * @return the predicted sample
   */
  @Override
  protected ChunkSample processSample(ChunkSample reference) {
    // Predict chunk tags for the reference sentence ...
    String[] preds = chunker.chunk(reference.getSentence(), reference.getTags());
    ChunkSample result = new ChunkSample(reference.getSentence(), reference.getTags(), preds);

    // ... and score the predicted spans against the reference spans.
    fmeasure.updateScores(reference.getPhrasesAsSpanList(), result.getPhrasesAsSpanList());

    return result;
  }

  /**
   * @return the F-measure accumulated over all evaluated samples
   */
  public FMeasure getFMeasure() {
    return fmeasure;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/chunker/ChunkerEventStream.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.chunker;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import opennlp.tools.ml.model.Event;
import opennlp.tools.util.AbstractEventStream;
import opennlp.tools.util.ObjectStream;
/**
 * Class for creating an event stream out of data files for training a chunker.
 * Each {@link ChunkSample} yields one {@link Event} per token whose outcome is
 * the token's chunk tag.
 */
public class ChunkerEventStream extends AbstractEventStream<ChunkSample> {

  private final ChunkerContextGenerator cg;

  /**
   * Creates a new event stream based on the specified data stream using the specified context generator.
   *
   * @param d The data stream for this event stream.
   * @param cg The context generator which should be used in the creation of events for this event stream.
   */
  public ChunkerEventStream(ObjectStream<ChunkSample> d, ChunkerContextGenerator cg) {
    super(d);
    this.cg = cg;
  }

  @Override
  protected Iterator<Event> createEvents(ChunkSample sample) {
    if (sample == null) {
      return Collections.emptyListIterator();
    }

    String[] toksArray = sample.getSentence();
    String[] tagsArray = sample.getTags();
    String[] predsArray = sample.getPreds();

    // One event per token: outcome is the chunk tag; the context is built
    // from the tokens, POS tags, and previous chunk decisions.
    List<Event> events = new ArrayList<>(toksArray.length);
    for (int ei = 0; ei < toksArray.length; ei++) {
      events.add(new Event(predsArray[ei], cg.getContext(ei, toksArray, tagsArray, predsArray)));
    }
    return events.iterator();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/chunker/ChunkerFactory.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.chunker;
import opennlp.tools.util.BaseToolFactory;
import opennlp.tools.util.InvalidFormatException;
import opennlp.tools.util.SequenceValidator;
import opennlp.tools.util.TokenTag;
import opennlp.tools.util.ext.ExtensionLoader;
/**
 * Tool factory for the chunker, providing the default context generator
 * and sequence validator. Subclasses can be loaded by name via
 * {@link #create(String)}.
 */
public class ChunkerFactory extends BaseToolFactory {
  /**
   * Creates a {@link ChunkerFactory} that provides the default implementation
   * of the resources.
   */
  public ChunkerFactory() {
  }
  /**
   * Instantiates a factory sub-class by fully qualified name via the
   * extension loader, or returns the default factory when the name is null.
   *
   * @param subclassName fully qualified sub-class name, or {@code null}
   * @return the instantiated factory
   * @throws InvalidFormatException if the sub-class cannot be instantiated
   */
  public static ChunkerFactory create(String subclassName)
      throws InvalidFormatException {
    if (subclassName == null) {
      // will create the default factory
      return new ChunkerFactory();
    }
    try {
      return ExtensionLoader.instantiateExtension(ChunkerFactory.class, subclassName);
    } catch (Exception e) {
      // Reported to stderr before rethrowing; preserved as-is since callers
      // may depend on the console output.
      String msg = "Could not instantiate the " + subclassName
          + ". The initialization throw an exception.";
      System.err.println(msg);
      e.printStackTrace();
      throw new InvalidFormatException(msg, e);
    }
  }
  @Override
  public void validateArtifactMap() throws InvalidFormatException {
    // no additional artifacts
  }
  /** @return the sequence validator constraining valid chunk tag sequences */
  public SequenceValidator<TokenTag> getSequenceValidator() {
    return new DefaultChunkerSequenceValidator();
  }
  /** @return the context generator producing chunking features */
  public ChunkerContextGenerator getContextGenerator() {
    return new DefaultChunkerContextGenerator();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/chunker/ChunkerME.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.chunker;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import opennlp.tools.ml.BeamSearch;
import opennlp.tools.ml.EventTrainer;
import opennlp.tools.ml.SequenceTrainer;
import opennlp.tools.ml.TrainerFactory;
import opennlp.tools.ml.TrainerFactory.TrainerType;
import opennlp.tools.ml.model.Event;
import opennlp.tools.ml.model.MaxentModel;
import opennlp.tools.ml.model.SequenceClassificationModel;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.Sequence;
import opennlp.tools.util.SequenceValidator;
import opennlp.tools.util.Span;
import opennlp.tools.util.TokenTag;
import opennlp.tools.util.TrainingParameters;
/**
 * The class represents a maximum-entropy-based chunker. Such a chunker can be used to
 * find flat structures based on sequence inputs such as noun phrases or named entities.
 */
public class ChunkerME implements Chunker {
  public static final int DEFAULT_BEAM_SIZE = 10;
  // Most recent sequence decoded by chunk(); probs() and probs(double[])
  // report the probabilities of this sequence.
  private Sequence bestSequence;
  /**
   * The model used to assign chunk tags to a sequence of tokens.
   */
  protected SequenceClassificationModel<TokenTag> model;
  private ChunkerContextGenerator contextGenerator;
  private SequenceValidator<TokenTag> sequenceValidator;
  /**
   * Initializes the current instance with the specified model and
   * the specified beam size.
   *
   * NOTE(review): this constructor is declared private in this fork even
   * though its javadoc reads like a public API - confirm against upstream
   * OpenNLP whether it was intentionally hidden.
   *
   * @param model The model for this chunker.
   * @param beamSize The size of the beam that should be used when decoding sequences.
   * @param sequenceValidator The {@link SequenceValidator} to determines whether the outcome
   *     is valid for the preceding sequence. This can be used to implement constraints
   *     on what sequences are valid.
   * @deprecated Use {@link #ChunkerME(ChunkerModel, int)} instead and use the {@link ChunkerFactory}
   *     to configure the {@link SequenceValidator} and {@link ChunkerContextGenerator}.
   */
  @Deprecated
  private ChunkerME(ChunkerModel model, int beamSize, SequenceValidator<TokenTag> sequenceValidator,
      ChunkerContextGenerator contextGenerator) {
    this.sequenceValidator = sequenceValidator;
    this.contextGenerator = contextGenerator;
    if (model.getChunkerSequenceModel() != null) {
      this.model = model.getChunkerSequenceModel();
    }
    else {
      // Wrap the plain maxent model in a beam search decoder.
      this.model = new opennlp.tools.ml.BeamSearch<>(beamSize,
          model.getChunkerModel(), 0);
    }
  }
  /**
   * Initializes the current instance with the specified model and
   * the specified beam size.
   *
   * @param model The model for this chunker.
   * @param beamSize The size of the beam that should be used when decoding sequences.
   *
   * @deprecated beam size is now stored inside the model
   */
  @Deprecated
  private ChunkerME(ChunkerModel model, int beamSize) {
    // Context generator and sequence validator come from the model's factory.
    contextGenerator = model.getFactory().getContextGenerator();
    sequenceValidator = model.getFactory().getSequenceValidator();
    if (model.getChunkerSequenceModel() != null) {
      this.model = model.getChunkerSequenceModel();
    }
    else {
      // Wrap the plain maxent model in a beam search decoder.
      this.model = new opennlp.tools.ml.BeamSearch<>(beamSize,
          model.getChunkerModel(), 0);
    }
  }
  /**
   * Initializes the current instance with the specified model.
   * The default beam size is used.
   *
   * @param model the chunker model to decode with
   */
  public ChunkerME(ChunkerModel model) {
    this(model, DEFAULT_BEAM_SIZE);
  }
  /**
   * Decodes the best chunk tag sequence for the given tokens and POS tags,
   * caching it in {@code bestSequence} for later {@code probs()} calls.
   */
  public String[] chunk(String[] toks, String[] tags) {
    TokenTag[] tuples = TokenTag.create(toks, tags);
    bestSequence = model.bestSequence(tuples, new Object[] {}, contextGenerator, sequenceValidator);
    List<String> c = bestSequence.getOutcomes();
    return c.toArray(new String[c.size()]);
  }
  /** Chunks the input and converts the tag sequence into typed spans. */
  public Span[] chunkAsSpans(String[] toks, String[] tags) {
    String[] preds = chunk(toks, tags);
    return ChunkSample.phrasesAsSpanList(toks, tags, preds);
  }
  /** Returns the top {@code DEFAULT_BEAM_SIZE} chunk tag sequences. */
  public Sequence[] topKSequences(String[] sentence, String[] tags) {
    TokenTag[] tuples = TokenTag.create(sentence, tags);
    return model.bestSequences(DEFAULT_BEAM_SIZE, tuples,
        new Object[] { }, contextGenerator, sequenceValidator);
  }
  /** Returns the top chunk tag sequences scoring at least {@code minSequenceScore}. */
  public Sequence[] topKSequences(String[] sentence, String[] tags, double minSequenceScore) {
    TokenTag[] tuples = TokenTag.create(sentence, tags);
    return model.bestSequences(DEFAULT_BEAM_SIZE, tuples, new Object[] { }, minSequenceScore,
        contextGenerator, sequenceValidator);
  }
  /**
   * Populates the specified array with the probabilities of the last decoded sequence. The
   * sequence was determined based on the previous call to <code>chunk</code>. The
   * specified array should be at least as large as the number of tokens in the previous
   * call to <code>chunk</code>.
   *
   * @param probs An array used to hold the probabilities of the last decoded sequence.
   */
  public void probs(double[] probs) {
    bestSequence.getProbs(probs);
  }
  /**
   * Returns an array with the probabilities of the last decoded sequence. The
   * sequence was determined based on the previous call to <code>chunk</code>.
   *
   * @return An array with the same number of probabilities as tokens were sent to <code>chunk</code>
   *     when it was last called.
   */
  public double[] probs() {
    return bestSequence.getProbs();
  }
  /**
   * Trains a chunker model from the given samples.
   *
   * @param lang the language code of the training data
   * @param in the training samples
   * @param mlParams the machine learning training parameters; determine the
   *     trainer type and beam size
   * @param factory provides the context generator used during training
   * @return the trained {@link ChunkerModel}
   * @throws IOException if reading the samples fails
   * @throws IllegalArgumentException if the configured trainer type is
   *     neither an event-model nor a sequence trainer
   */
  public static ChunkerModel train(String lang, ObjectStream<ChunkSample> in,
      TrainingParameters mlParams, ChunkerFactory factory) throws IOException {
    int beamSize = mlParams.getIntParameter(BeamSearch.BEAM_SIZE_PARAMETER, ChunkerME.DEFAULT_BEAM_SIZE);
    Map<String, String> manifestInfoEntries = new HashMap<>();
    TrainerType trainerType = TrainerFactory.getTrainerType(mlParams);
    // Exactly one of these two is set, depending on the trainer type.
    MaxentModel chunkerModel = null;
    SequenceClassificationModel<String> seqChunkerModel = null;
    if (TrainerType.EVENT_MODEL_TRAINER.equals(trainerType)) {
      ObjectStream<Event> es = new ChunkerEventStream(in, factory.getContextGenerator());
      EventTrainer trainer = TrainerFactory.getEventTrainer(mlParams,
          manifestInfoEntries);
      chunkerModel = trainer.train(es);
    }
    else if (TrainerType.SEQUENCE_TRAINER.equals(trainerType)) {
      SequenceTrainer trainer = TrainerFactory.getSequenceModelTrainer(
          mlParams, manifestInfoEntries);
      // TODO: This will probably cause issue, since the feature generator uses the outcomes array
      ChunkSampleSequenceStream ss = new ChunkSampleSequenceStream(in, factory.getContextGenerator());
      seqChunkerModel = trainer.train(ss);
    }
    else {
      throw new IllegalArgumentException("Trainer type is not supported: " + trainerType);
    }
    if (chunkerModel != null) {
      return new ChunkerModel(lang, chunkerModel, beamSize, manifestInfoEntries, factory);
    }
    else {
      return new ChunkerModel(lang, seqChunkerModel, manifestInfoEntries, factory);
    }
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/chunker/ChunkerModel.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.chunker;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.file.Path;
import java.util.Map;
import java.util.Properties;
import opennlp.tools.ml.BeamSearch;
import opennlp.tools.ml.model.AbstractModel;
import opennlp.tools.ml.model.MaxentModel;
import opennlp.tools.ml.model.SequenceClassificationModel;
import opennlp.tools.util.BaseToolFactory;
import opennlp.tools.util.InvalidFormatException;
import opennlp.tools.util.TokenTag;
import opennlp.tools.util.model.BaseModel;
/**
* The {@link ChunkerModel} is the model used
* by a learnable {@link Chunker}.
*
* @see ChunkerME
*/
public class ChunkerModel extends BaseModel {
private static final String COMPONENT_NAME = "ChunkerME";
private static final String CHUNKER_MODEL_ENTRY_NAME = "chunker.model";
public ChunkerModel(String languageCode, SequenceClassificationModel<String> chunkerModel,
Map<String, String> manifestInfoEntries, ChunkerFactory factory) {
super(COMPONENT_NAME, languageCode, manifestInfoEntries, factory);
artifactMap.put(CHUNKER_MODEL_ENTRY_NAME, chunkerModel);
checkArtifactMap();
}
public ChunkerModel(String languageCode, MaxentModel chunkerModel,
Map<String, String> manifestInfoEntries, ChunkerFactory factory) {
this(languageCode, chunkerModel, ChunkerME.DEFAULT_BEAM_SIZE, manifestInfoEntries, factory);
}
public ChunkerModel(String languageCode, MaxentModel chunkerModel, int beamSize,
Map<String, String> manifestInfoEntries, ChunkerFactory factory) {
super(COMPONENT_NAME, languageCode, manifestInfoEntries, factory);
artifactMap.put(CHUNKER_MODEL_ENTRY_NAME, chunkerModel);
Properties manifest = (Properties) artifactMap.get(MANIFEST_ENTRY);
manifest.put(BeamSearch.BEAM_SIZE_PARAMETER, Integer.toString(beamSize));
checkArtifactMap();
}
public ChunkerModel(String languageCode, MaxentModel chunkerModel, ChunkerFactory factory) {
this(languageCode, chunkerModel, null, factory);
}
public ChunkerModel(InputStream in) throws IOException, InvalidFormatException {
super(COMPONENT_NAME, in);
}
public ChunkerModel(File modelFile) throws IOException, InvalidFormatException {
super(COMPONENT_NAME, modelFile);
}
public ChunkerModel(Path modelPath) throws IOException, InvalidFormatException {
this(modelPath.toFile());
}
public ChunkerModel(URL modelURL) throws IOException, InvalidFormatException {
super(COMPONENT_NAME, modelURL);
}
@Override
protected void validateArtifactMap() throws InvalidFormatException {
super.validateArtifactMap();
if (!(artifactMap.get(CHUNKER_MODEL_ENTRY_NAME) instanceof AbstractModel)) {
throw new InvalidFormatException("Chunker model is incomplete!");
}
// Since 1.8.0 we changed the ChunkerFactory signature. This will check the if the model
// declares a not default factory, and if yes, check if it was created before 1.8
if ( (getManifestProperty(FACTORY_NAME) != null
&& !getManifestProperty(FACTORY_NAME).equals("opennlp.tools.chunker.ChunkerFactory") )
&& this.getVersion().getMajor() <= 1
&& this.getVersion().getMinor() < 8) {
throw new InvalidFormatException("The Chunker factory '" + getManifestProperty(FACTORY_NAME) +
"' is no longer compatible. Please update it to match the latest ChunkerFactory.");
}
}
/**
 * Returns the underlying {@link MaxentModel} artifact, or {@code null} when the
 * stored artifact is not a maxent model (e.g. a sequence model).
 *
 * @deprecated use getChunkerSequenceModel instead. This method will be removed soon.
 */
@Deprecated
public MaxentModel getChunkerModel() {
  // Look the artifact up once instead of querying the map twice.
  Object artifact = artifactMap.get(CHUNKER_MODEL_ENTRY_NAME);
  return (artifact instanceof MaxentModel) ? (MaxentModel) artifact : null;
}
/**
 * Returns the chunker as a sequence classification model. A plain maxent
 * artifact is wrapped in a {@link BeamSearch} using the beam size recorded in
 * the manifest (falling back to {@code ChunkerME.DEFAULT_BEAM_SIZE} when the
 * manifest carries none); a stored sequence model is returned directly.
 * Returns {@code null} when the artifact is neither.
 */
public SequenceClassificationModel<TokenTag> getChunkerSequenceModel() {
Properties manifest = (Properties) artifactMap.get(MANIFEST_ENTRY);
if (artifactMap.get(CHUNKER_MODEL_ENTRY_NAME) instanceof MaxentModel) {
String beamSizeString = manifest.getProperty(BeamSearch.BEAM_SIZE_PARAMETER);
int beamSize = ChunkerME.DEFAULT_BEAM_SIZE;
if (beamSizeString != null) {
beamSize = Integer.parseInt(beamSizeString);
}
return new BeamSearch<>(beamSize, (MaxentModel) artifactMap.get(CHUNKER_MODEL_ENTRY_NAME));
}
else if (artifactMap.get(CHUNKER_MODEL_ENTRY_NAME) instanceof SequenceClassificationModel) {
// NOTE(review): unchecked raw cast — assumes the stored sequence model was
// created with TokenTag elements; not verifiable from the artifact itself.
return (SequenceClassificationModel) artifactMap.get(CHUNKER_MODEL_ENTRY_NAME);
}
else {
return null;
}
}
/** The factory used when the manifest names none. */
@Override
protected Class<? extends BaseToolFactory> getDefaultFactory() {
return ChunkerFactory.class;
}
/** Returns the tool factory associated with this model, cast to ChunkerFactory. */
public ChunkerFactory getFactory() {
return (ChunkerFactory) this.toolFactory;
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/chunker/DefaultChunkerContextGenerator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.chunker;
import opennlp.tools.util.TokenTag;
/**
 * Features based on the chunking model described in Fei Sha and Fernando Pereira. Shallow
 * parsing with conditional random fields. In Proceedings of HLT-NAACL 2003. Association
 * for Computational Linguistics, 2003.
 *
 * <p>Generates features over a five-token window of words and POS tags plus the
 * two previous chunk predictions.
 */
public class DefaultChunkerContextGenerator implements ChunkerContextGenerator {

  /**
   * Creates the default context generator for a chunker.
   */
  public DefaultChunkerContextGenerator() {
  }

  public String[] getContext(int index, String[] tokens, String[] postags,
      String[] priorDecisions, Object[] additionalContext) {
    return getContext(index, tokens, postags, priorDecisions);
  }

  /**
   * Builds the feature strings for the token at position {@code i}.
   *
   * @param i index of the current token
   * @param toks sentence tokens
   * @param tags POS tags, parallel to {@code toks}
   * @param preds previous chunk predictions, parallel to {@code toks}
   * @return feature strings for position {@code i}
   */
  public String[] getContext(int i, String[] toks, String[] tags, String[] preds) {
    // Words in a 5-word window
    String w_2, w_1, w0, w1, w2;
    // Tags in a 5-word window
    String t_2, t_1, t0, t1, t2;
    // Previous predictions
    String p_2, p_1;

    if (i < 2) {
      w_2 = "w_2=bos";
      t_2 = "t_2=bos";
      p_2 = "p_2=bos";
    }
    else {
      w_2 = "w_2=" + toks[i - 2];
      t_2 = "t_2=" + tags[i - 2];
      // BUGFIX: the '=' separator was missing here ("p_2" + pred), which was
      // inconsistent with the "p_2=bos" form above and with p_1 below.
      // NOTE(review): this changes the generated feature strings, so models
      // trained with the old (buggy) generator must be retrained to match.
      p_2 = "p_2=" + preds[i - 2];
    }

    if (i < 1) {
      w_1 = "w_1=bos";
      t_1 = "t_1=bos";
      p_1 = "p_1=bos";
    }
    else {
      w_1 = "w_1=" + toks[i - 1];
      t_1 = "t_1=" + tags[i - 1];
      p_1 = "p_1=" + preds[i - 1];
    }

    w0 = "w0=" + toks[i];
    t0 = "t0=" + tags[i];

    if (i + 1 >= toks.length) {
      w1 = "w1=eos";
      t1 = "t1=eos";
    }
    else {
      w1 = "w1=" + toks[i + 1];
      t1 = "t1=" + tags[i + 1];
    }

    if (i + 2 >= toks.length) {
      w2 = "w2=eos";
      t2 = "t2=eos";
    }
    else {
      w2 = "w2=" + toks[i + 2];
      t2 = "t2=" + tags[i + 2];
    }

    String[] features = new String[] {
      //add word features
      w_2,
      w_1,
      w0,
      w1,
      w2,
      w_1 + w0,
      w0 + w1,
      //add tag features
      t_2,
      t_1,
      t0,
      t1,
      t2,
      t_2 + t_1,
      t_1 + t0,
      t0 + t1,
      t1 + t2,
      t_2 + t_1 + t0,
      t_1 + t0 + t1,
      t0 + t1 + t2,
      //add pred tags
      p_2,
      p_1,
      p_2 + p_1,
      //add pred and tag
      p_1 + t_2,
      p_1 + t_1,
      p_1 + t0,
      p_1 + t1,
      p_1 + t2,
      p_1 + t_2 + t_1,
      p_1 + t_1 + t0,
      p_1 + t0 + t1,
      p_1 + t1 + t2,
      p_1 + t_2 + t_1 + t0,
      p_1 + t_1 + t0 + t1,
      p_1 + t0 + t1 + t2,
      //add pred and word
      p_1 + w_2,
      p_1 + w_1,
      p_1 + w0,
      p_1 + w1,
      p_1 + w2,
      p_1 + w_1 + w0,
      p_1 + w0 + w1
    };

    return features;
  }

  @Override
  public String[] getContext(int index, TokenTag[] sequence, String[] priorDecisions,
      Object[] additionalContext) {
    String[] token = TokenTag.extractTokens(sequence);
    String[] tags = TokenTag.extractTags(sequence);
    return getContext(index, token, tags, priorDecisions, additionalContext);
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/chunker/DefaultChunkerSequenceValidator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.chunker;
import opennlp.tools.util.SequenceValidator;
import opennlp.tools.util.TokenTag;
/**
 * Validates chunk tag sequences: an {@code I-} (inside) outcome is only legal
 * when it continues a preceding non-{@code O} outcome of the same chunk type.
 */
public class DefaultChunkerSequenceValidator implements SequenceValidator<TokenTag> {

  private boolean validOutcome(String outcome, String prevOutcome) {
    // Anything other than a continuation tag is always acceptable.
    if (!outcome.startsWith("I-")) {
      return true;
    }
    // A continuation needs a predecessor, and that predecessor must be a chunk tag.
    if (prevOutcome == null || prevOutcome.equals("O")) {
      return false;
    }
    // The continued chunk type (after the "B-"/"I-" prefix) must match.
    return prevOutcome.substring(2).equals(outcome.substring(2));
  }

  protected boolean validOutcome(String outcome, String[] sequence) {
    // The relevant predecessor is the last outcome decided so far, if any.
    String prevOutcome = (sequence.length > 0) ? sequence[sequence.length - 1] : null;
    return validOutcome(outcome, prevOutcome);
  }

  public boolean validSequence(int i, TokenTag[] sequence, String[] s, String outcome) {
    return validOutcome(outcome, s);
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/chunker/package-info.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Package related to finding non-recursive syntactic annotation such as noun phrase chunks.
*/
package opennlp.tools.chunker;
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/AbstractConverterTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline;
import java.io.IOException;
import java.util.Map;
import opennlp.tools.util.ObjectStream;
/**
 * Base class for format conversion tools. Reads samples from a foreign format
 * and prints them in the native OpenNLP representation.
 *
 * @param <T> class of data sample the tool converts, for example {@link opennlp.tools.postag
 * .POSSample}
 */
public abstract class AbstractConverterTool<T> extends TypedCmdLineTool<T> {
/**
 * Constructor with type parameter.
 *
 * @param sampleType class of the template parameter
 */
protected AbstractConverterTool(Class<T> sampleType) {
super(sampleType);
}
/**
 * Builds a one-line description listing the registered non-default formats.
 * With exactly two registered factories (opennlp + one foreign) the single
 * foreign name is used; with more, a comma-separated list is emitted.
 */
public String getShortDescription() {
Map<String, ObjectStreamFactory<T>> factories = StreamFactoryRegistry.getFactories(type);
StringBuilder help = new StringBuilder();
if (2 == factories.keySet().size()) { //opennlp + foreign
for (String format : factories.keySet()) {
if (!StreamFactoryRegistry.DEFAULT_FORMAT.equals(format)) {
help.append(format);
}
}
return "converts " + help.toString() + " data format to native OpenNLP format";
} else if (2 < factories.keySet().size()) {
for (String format : factories.keySet()) {
if (!StreamFactoryRegistry.DEFAULT_FORMAT.equals(format)) {
help.append(format).append(",");
}
}
// substring trims the trailing comma added in the loop above.
return "converts foreign data formats (" + help.substring(0, help.length() - 1 ) +
") to native OpenNLP format";
} else {
// Only the default factory (or none) is registered: a converter has nothing to convert.
throw new AssertionError("There should be more than 1 factory registered for converter " +
"tool");
}
}
/** Assembles "Usage: <cli> <tool> <format> <usage>" for help output. */
private String createHelpString(String format, String usage) {
return "Usage: " + CLI.CMD + " " + getName() + " " + format + " " + usage;
}
/** Lists all non-default formats as "help|fmt1|fmt2|..." in the usage line. */
public String getHelp() {
Map<String, ObjectStreamFactory<T>> factories = StreamFactoryRegistry.getFactories(type);
StringBuilder help = new StringBuilder("help|");
for (String formatName : factories.keySet()) {
if (!StreamFactoryRegistry.DEFAULT_FORMAT.equals(formatName)) {
help.append(formatName).append("|");
}
}
// substring drops the trailing "|" separator.
return createHelpString(help.substring(0, help.length() - 1), "[help|options...]");
}
/** Help is format-independent for converters, so the argument is ignored. */
public String getHelp(String format) {
return getHelp();
}
/**
 * Runs the conversion: reads samples via the selected format's stream factory
 * and prints each sample's native toString() to stdout.
 * NOTE(review): the {@code format} parameter is immediately overwritten with
 * {@code args[0]} — callers are expected to pass the format as the first arg.
 */
public void run(String format, String[] args) {
if (0 == args.length) {
System.out.println(getHelp());
} else {
format = args[0];
ObjectStreamFactory<T> streamFactory = getStreamFactory(format);
String[] formatArgs = new String[args.length - 1];
System.arraycopy(args, 1, formatArgs, 0, formatArgs.length);
String helpString = createHelpString(format, ArgumentParser.createUsage(streamFactory.getParameters()));
// Explicit "help" (or no args at all) prints usage and terminates the JVM.
if (0 == formatArgs.length || (1 == formatArgs.length && "help".equals(formatArgs[0]))) {
System.out.println(helpString);
System.exit(0);
}
String errorMessage = ArgumentParser.validateArgumentsLoudly(formatArgs, streamFactory.getParameters());
if (null != errorMessage) {
throw new TerminateToolException(1, errorMessage + "\n" + helpString);
}
// try-with-resources ensures the sample stream is closed even on failure.
try (ObjectStream<T> sampleStream = streamFactory.create(formatArgs)) {
Object sample;
while ((sample = sampleStream.read()) != null) {
System.out.println(sample.toString());
}
}
catch (IOException e) {
throw new TerminateToolException(-1, "IO error while converting data : " + e.getMessage(), e);
}
}
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/AbstractCrossValidatorTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline;
/**
 * Base class for cross validator tools. Adds no behavior of its own; it exists
 * to give cross-validation tools a common supertype on top of the trainer-tool
 * machinery (sample stream, parameters, training params).
 */
public abstract class AbstractCrossValidatorTool<T, P> extends AbstractTrainerTool<T, P> {
/**
 * Constructor with type parameters.
 *
 * @param sampleType class of the template parameter
 * @param params interface with parameters
 */
protected AbstractCrossValidatorTool(Class<T> sampleType, Class<P> params) {
super(sampleType, params);
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/AbstractEvaluatorTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline;
import opennlp.tools.util.ObjectStream;
/**
 * Base class for evaluator tools. {@link #run(String, String[])} parses and
 * validates the command line, then opens the sample stream; subclasses are
 * expected to call it first and then consume {@link #sampleStream}.
 */
public class AbstractEvaluatorTool<T, P> extends AbstractTypedParamTool<T, P> {
// Parsed tool parameters, populated by run().
protected P params;
// Stream factory selected for the requested format, populated by run().
protected ObjectStreamFactory<T> factory;
// Open sample stream, populated by run(); closing it is the subclass's responsibility.
protected ObjectStream<T> sampleStream;
/**
 * Constructor with type parameters.
 *
 * @param sampleType class of the template parameter
 * @param params interface with parameters
 */
protected AbstractEvaluatorTool(Class<T> sampleType, Class<P> params) {
super(sampleType, params);
}
/**
 * Validates all arguments, parses the tool parameters, then validates and
 * applies the factory-specific arguments to create the sample stream.
 */
public void run(String format, String[] args) {
validateAllArgs(args, this.paramsClass, format);
params = ArgumentParser.parse(
ArgumentParser.filter(args, this.paramsClass), this.paramsClass);
factory = getStreamFactory(format);
// Factory-specific arguments are filtered out of the full argument list.
String[] fargs = ArgumentParser.filter(args, factory.getParameters());
validateFactoryArgs(factory, fargs);
sampleStream = factory.create(fargs);
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/AbstractTrainerTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline;
import java.io.IOException;
import opennlp.tools.util.InsufficientTrainingDataException;
import opennlp.tools.util.TrainingParameters;
/**
 * Base class for trainer tools. Extends the evaluator-tool plumbing with
 * machine-learning training parameters and common IO-error reporting.
 */
public class AbstractTrainerTool<T, P> extends AbstractEvaluatorTool<T, P> {
// Training parameters shared by all trainer subclasses.
protected TrainingParameters mlParams;
/**
 * Constructor with type parameters.
 *
 * @param sampleType class of the template parameter
 * @param params interface with parameters
 */
protected AbstractTrainerTool(Class<T> sampleType, Class<P> params) {
super(sampleType, params);
}
/**
 * Maps an IOException raised during training into a TerminateToolException
 * with a user-facing message. Insufficient training data gets a dedicated,
 * actionable explanation; everything else reports the IO error.
 */
protected TerminateToolException createTerminationIOException(IOException e) {
if (e instanceof InsufficientTrainingDataException) {
return new TerminateToolException(-1, "\n\nERROR: Not enough training data\n" +
"The provided training data is not sufficient to create enough events to train a model.\n" +
"To resolve this error use more training data, if this doesn't help there might\n" +
"be some fundamental problem with the training data itself.");
}
// Preserve the cause so the original stack trace is not lost.
return new TerminateToolException(-1, "IO error while reading training data or indexing data: " +
e.getMessage(), e);
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/AbstractTypedParamTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline;
/**
 * Base class for tools which take additional parameters. For example, trainers or evaluators.
 */
public abstract class AbstractTypedParamTool<T, P> extends TypedCmdLineTool<T> {
/**
 * variable to access the parameters
 */
protected final Class<P> paramsClass;
/**
 * Constructor with type parameters.
 *
 * @param sampleType class of the template parameter
 * @param paramsClass interface with parameters
 */
protected AbstractTypedParamTool(Class<T> sampleType, Class<P> paramsClass) {
super(sampleType);
this.paramsClass = paramsClass;
}
/**
 * Builds the help text for the given data format. The empty or default format
 * uses the generic help; any other format merges the tool parameters with the
 * format factory's parameters into a usage line.
 *
 * @throws TerminateToolException if no factory is registered for the format
 */
public String getHelp(String format) {
if ("".equals(format) || StreamFactoryRegistry.DEFAULT_FORMAT.equals(format)) {
return getBasicHelp(paramsClass,
StreamFactoryRegistry.getFactory(type, StreamFactoryRegistry.DEFAULT_FORMAT).getParameters());
} else {
ObjectStreamFactory<T> factory = StreamFactoryRegistry.getFactory(type, format);
if (null == factory) {
throw new TerminateToolException(1, "Format " + format + " is not found.\n" + getHelp());
}
return "Usage: " + CLI.CMD + " " + getName() + "." + format + " " +
ArgumentParser.createUsage(paramsClass, factory.getParameters());
}
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/ArgumentParser.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline;
import java.io.File;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.nio.charset.Charset;
import java.nio.charset.IllegalCharsetNameException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Parser for command line arguments. The parser creates a dynamic proxy which
* can be access via a command line argument interface.
*
* <p>
*
* The command line argument proxy interface must follow these conventions:<br>
* - Methods do not define arguments<br>
* - Method names must start with get<br>
* - Allowed return types are Integer, Boolean, String, File and Charset.<br>
* <p>
* <b>Note:</b> Do not use this class, internal use only!
*/
public class ArgumentParser {
/**
 * Marks a parameter-interface getter as optional on the command line; an
 * absent argument falls back to {@link #defaultValue()}.
 */
public @Retention(RetentionPolicy.RUNTIME) @interface OptionalParameter {
// CHECKSTYLE:OFF
// Sentinel default for Charset parameters: resolved to Charset.defaultCharset()
// by CharsetArgumentFactory at parse time.
String DEFAULT_CHARSET = "DEFAULT_CHARSET";
// CHECKSTYLE:ON
String defaultValue() default "";
}
/**
 * Describes a parameter for usage/help output: the placeholder value name and
 * an optional human-readable description.
 */
public @Retention(RetentionPolicy.RUNTIME) @interface ParameterDescription {
String valueName();
String description() default "";
}
/**
 * Internal strategy interface: converts one raw argument string into the
 * typed value expected by the corresponding getter's return type.
 */
private interface ArgumentFactory {
String INVALID_ARG = "Invalid argument: %s %s \n";
Object parseArgument(Method method, String argName, String argValue);
}
/**
 * Parses an argument value into an {@link Integer}; a non-numeric value
 * terminates the tool with an explanatory message.
 */
private static class IntegerArgumentFactory implements ArgumentFactory {

  public Object parseArgument(Method method, String argName, String argValue) {
    try {
      return Integer.parseInt(argValue);
    }
    catch (NumberFormatException e) {
      // Report the offending argument and keep the original cause attached.
      throw new TerminateToolException(1, String.format(INVALID_ARG, argName, argValue)
          + "Value must be an integer!", e);
    }
  }
}
/** Parses an argument value into a Boolean via Boolean.parseBoolean (never fails). */
private static class BooleanArgumentFactory implements ArgumentFactory {
public Object parseArgument(Method method, String argName, String argValue) {
return Boolean.parseBoolean(argValue);
}
}
/** Passes the argument value through unchanged. */
private static class StringArgumentFactory implements ArgumentFactory {
public Object parseArgument(Method method, String argName, String argValue) {
return argValue;
}
}
/** Wraps the argument value in a File; existence is not checked here. */
private static class FileArgumentFactory implements ArgumentFactory {
public Object parseArgument(Method method, String argName, String argValue) {
return new File(argValue);
}
}
/**
 * Parses an argument value into a {@link Charset}. The sentinel
 * {@code OptionalParameter.DEFAULT_CHARSET} resolves to the platform default;
 * unsupported or illegal charset names terminate the tool.
 */
private static class CharsetArgumentFactory implements ArgumentFactory {
public Object parseArgument(Method method, String argName, String charsetName) {
try {
if (OptionalParameter.DEFAULT_CHARSET.equals(charsetName)) {
return Charset.defaultCharset();
} else if (Charset.isSupported(charsetName)) {
return Charset.forName(charsetName);
} else {
// Legal name, but no provider on this platform supports it.
throw new TerminateToolException(1, String.format(INVALID_ARG, argName, charsetName) +
"Encoding not supported on this platform.");
}
} catch (IllegalCharsetNameException e) {
// Thrown by isSupported/forName for syntactically invalid names.
throw new TerminateToolException(1, String.format(INVALID_ARG, argName, charsetName) +
"Illegal encoding name.");
}
}
}
/**
 * Invocation handler backing the dynamic proxy returned by parse(): each
 * getter simply looks up its pre-parsed value by method name.
 */
private static class ArgumentProxy implements InvocationHandler {
// Parsed values keyed by getter method name.
private final Map<String, Object> arguments;
ArgumentProxy(Map<String, Object> arguments) {
this.arguments = arguments;
}
public Object invoke(Object proxy, Method method, Object[] args)
throws Throwable {
// Argument-interface getters take no parameters; any call with arguments
// (NOTE(review): including Object methods such as equals) is rejected.
if (args != null)
throw new IllegalStateException();
return arguments.get(method.getName());
}
}
// Registry of supported getter return types and their parsers. The key set
// doubles as the list of allowed return types in checkProxyInterfaces.
private static final Map<Class<?>, ArgumentFactory> argumentFactories;
static {
Map<Class<?>, ArgumentFactory> factories = new HashMap<>();
factories.put(Integer.class, new IntegerArgumentFactory());
factories.put(Boolean.class, new BooleanArgumentFactory());
factories.put(String.class, new StringArgumentFactory());
factories.put(File.class, new FileArgumentFactory());
factories.put(Charset.class, new CharsetArgumentFactory());
argumentFactories = Collections.unmodifiableMap(factories);
}
// Static utility class; not instantiable.
private ArgumentParser() {
}
/**
 * Validates that every supplied proxy interface follows the argument-interface
 * conventions: it is an interface, declares at least one method, every method
 * is a zero-argument getter named {@code getXxx}, and every return type has a
 * registered {@link ArgumentFactory}.
 *
 * @param proxyInterfaces interfaces to validate; null entries are skipped
 * @throws IllegalArgumentException if any convention is violated
 */
private static void checkProxyInterfaces(Class<?>... proxyInterfaces) {
  for (Class<?> proxyInterface : proxyInterfaces) {
    if (null != proxyInterface) {
      if (!proxyInterface.isInterface())
        throw new IllegalArgumentException("proxy interface is not an interface!");
      // all checks should also be performed for super interfaces
      Method[] methods = proxyInterface.getMethods();
      if (methods.length == 0)
        throw new IllegalArgumentException("proxy interface must at least declare one method!");
      for (Method method : methods) {
        // BUGFIX: the method name must start with "get" AND carry at least one
        // property character after it — methodNameToParameter indexes char 3
        // unconditionally. The old condition (!startsWith && length > 3) let
        // short or non-getter names through, failing later with an
        // ArrayIndexOutOfBoundsException instead of this clear message.
        if (!method.getName().startsWith("get") || method.getName().length() <= 3)
          throw new IllegalArgumentException(method.getName() + " method name does not start with 'get'!");
        // check that method has zero arguments
        if (method.getParameterTypes().length != 0) {
          throw new IllegalArgumentException(method.getName()
              + " method must have zero parameters but has "
              + method.getParameterTypes().length + "!");
        }
        // check return types of interface
        Class<?> returnType = method.getReturnType();
        Set<Class<?>> compatibleReturnTypes = argumentFactories.keySet();
        if (!compatibleReturnTypes.contains(returnType)) {
          throw new IllegalArgumentException(method.getName()
              + " method must have compatible return type! Got "
              + returnType + ", expected one of " + compatibleReturnTypes);
        }
      }
    }
  }
}
/**
 * Converts a getter name to its command line flag: drops the leading "get",
 * lower-cases the first property character and prefixes the result with '-'
 * (e.g. {@code getEncoding} becomes {@code -encoding}).
 */
private static String methodNameToParameter(String methodName) {
  // name length is checked to be at least 4 prior, so index 3 is safe
  char[] chars = methodName.toCharArray();
  chars[3] = Character.toLowerCase(chars[3]);
  return "-" + new String(chars, 3, chars.length - 3);
}
/**
 * Creates a usage string which can be printed in case the user did specify the arguments
 * incorrectly. Incorrectly is defined as {@link ArgumentParser#validateArguments(String[], Class)}
 * returns false.
 *
 * <p>Convenience overload delegating to the varargs form with a single interface.
 *
 * @param argProxyInterface interface with parameter descriptions
 * @return the help message usage string
 */
@SuppressWarnings({"unchecked"})
public static <T> String createUsage(Class<T> argProxyInterface) {
return createUsage(new Class[]{argProxyInterface});
}
/**
 * Auxiliary class that holds information about an argument. This is used by the
 * GenerateManualTool, which creates a Docbook for the CLI automatically.
 * Immutable value holder: argument name (without dash), value placeholder,
 * description text and whether the argument is optional.
 */
static class Argument {
private final String argument;
private final String value;
private final String description;
private final boolean optional;
public Argument(String argument, String value, String description,
boolean optional) {
super();
this.argument = argument;
this.value = value;
this.description = description;
this.optional = optional;
}
/** The argument name without the leading dash. */
public String getArgument() {
return argument;
}
/** The placeholder shown for the argument's value in usage output. */
public String getValue() {
return value;
}
/** The human-readable description of the argument. */
public String getDescription() {
return description;
}
/** True when the argument is annotated as optional. */
public boolean getOptional() {
return optional;
}
}
/**
 * Outputs the arguments as a data structure so it can be used to create documentation.
 * Only methods carrying a {@link ParameterDescription} are reported; duplicate
 * parameter names across interfaces are emitted once (first occurrence wins).
 *
 * @param argProxyInterfaces interfaces with parameter descriptions
 * @return the described arguments, in declaration order
 */
public static List<Argument> createArguments(Class<?>... argProxyInterfaces) {
  checkProxyInterfaces(argProxyInterfaces);

  Set<String> seenParams = new HashSet<>();
  List<Argument> arguments = new LinkedList<>();

  for (Class<?> argProxyInterface : argProxyInterfaces) {
    if (argProxyInterface == null) {
      continue;
    }
    for (Method method : argProxyInterface.getMethods()) {
      ParameterDescription desc = method.getAnnotation(ParameterDescription.class);
      if (desc == null) {
        // Undescribed getters are not documented.
        continue;
      }
      String paramName = methodNameToParameter(method.getName());
      // Set.add returns false for a name we already emitted.
      if (!seenParams.add(paramName)) {
        continue;
      }
      boolean isOptional = method.getAnnotation(OptionalParameter.class) != null;
      // substring(1) strips the leading dash from the flag name.
      arguments.add(new Argument(paramName.substring(1),
          desc.valueName(), desc.description(), isOptional));
    }
  }
  return arguments;
}
/**
 * Creates a usage string which can be printed in case the user did specify the arguments
 * incorrectly. Incorrectly is defined as {@link ArgumentParser#validateArguments(String[],
 * Class[])}
 * returns false.
 *
 * <p>The first section lists each parameter inline (optional ones bracketed);
 * the second section repeats each parameter with its description, one per line.
 * Duplicate parameter names across interfaces appear once.
 *
 * @param argProxyInterfaces interfaces with parameter descriptions
 * @return the help message usage string
 */
public static String createUsage(Class<?>... argProxyInterfaces) {
checkProxyInterfaces(argProxyInterfaces);
Set<String> duplicateFilter = new HashSet<>();
StringBuilder usage = new StringBuilder();
StringBuilder details = new StringBuilder();
for (Class<?> argProxyInterface : argProxyInterfaces) {
if (null != argProxyInterface) {
for (Method method : argProxyInterface.getMethods()) {
ParameterDescription desc = method.getAnnotation(ParameterDescription.class);
OptionalParameter optional = method.getAnnotation(OptionalParameter.class);
// Only described getters contribute to the usage text.
if (desc != null) {
String paramName = methodNameToParameter(method.getName());
if (duplicateFilter.contains(paramName)) {
continue;
}
else {
duplicateFilter.add(paramName);
}
// Optional parameters are wrapped in square brackets.
if (optional != null)
usage.append('[');
usage.append(paramName).append(' ').append(desc.valueName());
details.append('\t').append(paramName).append(' ').append(desc.valueName()).append('\n');
if (desc.description().length() > 0) {
details.append("\t\t").append(desc.description()).append('\n');
}
if (optional != null)
usage.append(']');
usage.append(' ');
}
}
}
}
// Drop the trailing separator space / newline before assembling the result.
if (usage.length() > 0)
usage.setLength(usage.length() - 1);
if (details.length() > 0) {
details.setLength(details.length() - 1);
usage.append("\n\nArguments description:\n").append(details.toString());
}
return usage.toString();
}
/**
 * Tests if the argument are correct or incorrect. Incorrect means, that mandatory arguments are missing or
 * there are unknown arguments. The argument value itself can also be incorrect, but this
 * is checked by the {@link ArgumentParser#parse(String[], Class)} method and reported accordingly.
 *
 * @param args command line arguments
 * @param argProxyInterface interface with parameters description
 * @return true, if arguments are valid
 */
@SuppressWarnings({"unchecked"})
public static <T> boolean validateArguments(String[] args, Class<T> argProxyInterface) {
return validateArguments(args, new Class[]{argProxyInterface});
}
/**
 * Tests if the argument are correct or incorrect. Incorrect means, that mandatory arguments are missing or
 * there are unknown arguments. The argument value itself can also be incorrect, but this
 * is checked by the {@link ArgumentParser#parse(String[], Class)} method and reported accordingly.
 *
 * @param args command line arguments
 * @param argProxyInterfaces interfaces with parameters description
 * @return true, if arguments are valid
 */
public static boolean validateArguments(String[] args, Class<?>... argProxyInterfaces) {
// Valid exactly when the loud variant produces no error message.
return null == validateArgumentsLoudly(args, argProxyInterfaces);
}
/**
 * Tests if the arguments are correct or incorrect.
 *
 * @param args command line arguments
 * @param argProxyInterface interface with parameters description
 * @return null, if arguments are valid or error message otherwise
 */
public static String validateArgumentsLoudly(String[] args, Class<?> argProxyInterface) {
return validateArgumentsLoudly(args, new Class[]{argProxyInterface});
}
/**
 * Tests if the arguments are correct or incorrect.
 *
 * <p>Checks that args come in flag/value pairs, that every mandatory parameter
 * is present with a value, and that no unrecognized flags remain.
 *
 * @param args command line arguments
 * @param argProxyInterfaces interfaces with parameters description
 * @return null, if arguments are valid or error message otherwise
 */
public static String validateArgumentsLoudly(String[] args, Class<?>... argProxyInterfaces) {
// number of parameters must be always be even
if (args.length % 2 != 0) {
return "Number of parameters must be always be even";
}
int argumentCount = 0;
// Working copy of args; recognized flags and values are removed so whatever
// remains at the end is reported as unrecognized.
List<String> parameters = new ArrayList<>(Arrays.asList(args));
for (Class<?> argProxyInterface : argProxyInterfaces) {
for (Method method : argProxyInterface.getMethods()) {
String paramName = methodNameToParameter(method.getName());
int paramIndex = CmdLineUtil.getParameterIndex(paramName, args);
String valueString = CmdLineUtil.getParameter(paramName, args);
if (valueString == null) {
OptionalParameter optionalParam = method.getAnnotation(OptionalParameter.class);
if (optionalParam == null) {
// Mandatory parameter: distinguish "present but valueless" from "absent".
if (-1 < paramIndex) {
return "Missing mandatory parameter value: " + paramName;
} else {
return "Missing mandatory parameter: " + paramName;
}
} else {
// NOTE(review): paramName already starts with '-' (see
// methodNameToParameter), so this removes "--name", which never
// matches an entry of args — likely a latent bug; confirm intent.
parameters.remove("-" + paramName);
}
}
else {
parameters.remove(paramName);
parameters.remove(valueString);
argumentCount++;
}
}
}
// Fewer matched pairs than supplied pairs means leftover unknown flags.
if (args.length / 2 > argumentCount) {
return "Unrecognized parameters encountered: " + parameters.toString();
}
return null;
}
/**
* Parses the passed arguments and creates an instance of the proxy interface.
* <p>
* In case an argument value cannot be parsed a {@link TerminateToolException} is
* thrown which contains an error message which explains the problems.
*
* @param args arguments
* @param argProxyInterface interface with parameters description
*
* @return parsed parameters
*
* @throws TerminateToolException if an argument value cannot be parsed.
* @throws IllegalArgumentException if validateArguments returns false,
* if the proxy interface is not compatible.
*/
@SuppressWarnings("unchecked")
public static <T> T parse(String[] args, Class<T> argProxyInterface) {
  checkProxyInterfaces(argProxyInterface);
  if (!validateArguments(args, argProxyInterface)) {
    throw new IllegalArgumentException("Passed args must be valid!");
  }
  // Maps the proxied method name to its parsed argument value (null for
  // optional parameters without a value or default).
  Map<String, Object> arguments = new HashMap<>();
  for (Method method : argProxyInterface.getMethods()) {
    String parameterName = methodNameToParameter(method.getName());
    String valueString = CmdLineUtil.getParameter(parameterName, args);
    if (valueString == null) {
      // validateArguments above guarantees every mandatory parameter is
      // present, so a missing value implies the annotation exists here.
      OptionalParameter optionalParam = method.getAnnotation(OptionalParameter.class);
      if (optionalParam.defaultValue().length() > 0) {
        valueString = optionalParam.defaultValue();
      }
      // otherwise valueString stays null and the proxy will return null
    }
    Class<?> returnType = method.getReturnType();
    Object value;
    if (valueString != null) {
      ArgumentFactory factory = argumentFactories.get(returnType);
      if (factory == null) {
        throw new IllegalStateException("factory for '" + returnType + "' must not be null");
      }
      value = factory.parseArgument(method, parameterName, valueString);
    } else {
      value = null;
    }
    arguments.put(method.getName(), value);
  }
  return (T) java.lang.reflect.Proxy.newProxyInstance(
      argProxyInterface.getClassLoader(),
      new Class[]{argProxyInterface},
      new ArgumentProxy(arguments));
}
/**
* Filters arguments leaving only those pertaining to argProxyInterface.
*
* @param args arguments
* @param argProxyInterface interface with parameters description
* @param <T> T
* @return arguments pertaining to argProxyInterface
*/
public static <T> String[] filter(String[] args, Class<T> argProxyInterface) {
  // Collect only the flags (and their values) declared by argProxyInterface.
  List<String> kept = new ArrayList<>(args.length);
  for (Method method : argProxyInterface.getMethods()) {
    String paramName = methodNameToParameter(method.getName());
    if (CmdLineUtil.getParameterIndex(paramName, args) > -1) {
      kept.add(paramName);
      String value = CmdLineUtil.getParameter(paramName, args);
      if (value != null) {
        kept.add(value);
      }
    }
  }
  return kept.toArray(new String[0]);
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/BasicCmdLineTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline;
/**
* A simple tool which can be executed from the command line.
* <p>
* <b>Note:</b> Do not use this class, internal use only!
*/
public abstract class BasicCmdLineTool extends CmdLineTool {

  /**
   * Executes the tool with the given parameters.
   *
   * @param args arguments; as dispatched from the CLI launcher this appears to
   *             exclude the tool name itself — confirm against CLI.main
   */
  public abstract void run(String[] args);
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/CLI.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import opennlp.tools.cmdline.chunker.ChunkerConverterTool;
import opennlp.tools.cmdline.chunker.ChunkerCrossValidatorTool;
import opennlp.tools.cmdline.chunker.ChunkerEvaluatorTool;
import opennlp.tools.cmdline.chunker.ChunkerMETool;
import opennlp.tools.cmdline.chunker.ChunkerTrainerTool;
import opennlp.tools.cmdline.dictionary.DictionaryBuilderTool;
import opennlp.tools.cmdline.doccat.DoccatConverterTool;
import opennlp.tools.cmdline.doccat.DoccatCrossValidatorTool;
import opennlp.tools.cmdline.doccat.DoccatEvaluatorTool;
import opennlp.tools.cmdline.doccat.DoccatTool;
import opennlp.tools.cmdline.doccat.DoccatTrainerTool;
import opennlp.tools.cmdline.entitylinker.EntityLinkerTool;
import opennlp.tools.cmdline.langdetect.LanguageDetectorConverterTool;
import opennlp.tools.cmdline.langdetect.LanguageDetectorCrossValidatorTool;
import opennlp.tools.cmdline.langdetect.LanguageDetectorEvaluatorTool;
import opennlp.tools.cmdline.langdetect.LanguageDetectorTool;
import opennlp.tools.cmdline.langdetect.LanguageDetectorTrainerTool;
import opennlp.tools.cmdline.languagemodel.NGramLanguageModelTool;
import opennlp.tools.cmdline.lemmatizer.LemmatizerEvaluatorTool;
import opennlp.tools.cmdline.lemmatizer.LemmatizerMETool;
import opennlp.tools.cmdline.lemmatizer.LemmatizerTrainerTool;
import opennlp.tools.cmdline.namefind.CensusDictionaryCreatorTool;
import opennlp.tools.cmdline.namefind.TokenNameFinderConverterTool;
import opennlp.tools.cmdline.namefind.TokenNameFinderCrossValidatorTool;
import opennlp.tools.cmdline.namefind.TokenNameFinderEvaluatorTool;
import opennlp.tools.cmdline.namefind.TokenNameFinderTool;
import opennlp.tools.cmdline.namefind.TokenNameFinderTrainerTool;
import opennlp.tools.cmdline.parser.BuildModelUpdaterTool;
import opennlp.tools.cmdline.parser.CheckModelUpdaterTool;
import opennlp.tools.cmdline.parser.ParserConverterTool;
import opennlp.tools.cmdline.parser.ParserEvaluatorTool;
import opennlp.tools.cmdline.parser.ParserTool;
import opennlp.tools.cmdline.parser.ParserTrainerTool;
import opennlp.tools.cmdline.parser.TaggerModelReplacerTool;
import opennlp.tools.cmdline.postag.POSTaggerConverterTool;
import opennlp.tools.cmdline.postag.POSTaggerCrossValidatorTool;
import opennlp.tools.cmdline.postag.POSTaggerEvaluatorTool;
import opennlp.tools.cmdline.postag.POSTaggerTrainerTool;
import opennlp.tools.cmdline.sentdetect.SentenceDetectorConverterTool;
import opennlp.tools.cmdline.sentdetect.SentenceDetectorCrossValidatorTool;
import opennlp.tools.cmdline.sentdetect.SentenceDetectorEvaluatorTool;
import opennlp.tools.cmdline.sentdetect.SentenceDetectorTool;
import opennlp.tools.cmdline.sentdetect.SentenceDetectorTrainerTool;
import opennlp.tools.cmdline.tokenizer.DictionaryDetokenizerTool;
import opennlp.tools.cmdline.tokenizer.SimpleTokenizerTool;
import opennlp.tools.cmdline.tokenizer.TokenizerConverterTool;
import opennlp.tools.cmdline.tokenizer.TokenizerCrossValidatorTool;
import opennlp.tools.cmdline.tokenizer.TokenizerMEEvaluatorTool;
import opennlp.tools.cmdline.tokenizer.TokenizerMETool;
import opennlp.tools.cmdline.tokenizer.TokenizerTrainerTool;
import opennlp.tools.util.Version;
/**
 * Command line launcher: registers all available tools and dispatches
 * {@link #main(String[])} invocations to the selected tool.
 */
public final class CLI {

  // Command used to invoke this launcher; printed in usage messages.
  public static final String CMD = "opennlp";

  // Maps a tool's command name (e.g. "TokenizerME") to the tool instance.
  // Filled once in the static initializer below and then frozen.
  private static Map<String, CmdLineTool> toolLookupMap;

  static {
    toolLookupMap = new LinkedHashMap<>();

    // LinkedList/LinkedHashMap keep the registration order, which is also the
    // order tools are listed in the usage output.
    List<CmdLineTool> tools = new LinkedList<>();

    // Document Categorizer
    tools.add(new DoccatTool());
    tools.add(new DoccatTrainerTool());
    tools.add(new DoccatEvaluatorTool());
    tools.add(new DoccatCrossValidatorTool());
    tools.add(new DoccatConverterTool());

    // Language Detector
    tools.add(new LanguageDetectorTool());
    tools.add(new LanguageDetectorTrainerTool());
    tools.add(new LanguageDetectorConverterTool());
    tools.add(new LanguageDetectorCrossValidatorTool());
    tools.add(new LanguageDetectorEvaluatorTool());

    // Dictionary Builder
    tools.add(new DictionaryBuilderTool());

    // Tokenizer
    tools.add(new SimpleTokenizerTool());
    tools.add(new TokenizerMETool());
    tools.add(new TokenizerTrainerTool());
    tools.add(new TokenizerMEEvaluatorTool());
    tools.add(new TokenizerCrossValidatorTool());
    tools.add(new TokenizerConverterTool());
    tools.add(new DictionaryDetokenizerTool());

    // Sentence detector
    tools.add(new SentenceDetectorTool());
    tools.add(new SentenceDetectorTrainerTool());
    tools.add(new SentenceDetectorEvaluatorTool());
    tools.add(new SentenceDetectorCrossValidatorTool());
    tools.add(new SentenceDetectorConverterTool());

    // Name Finder
    tools.add(new TokenNameFinderTool());
    tools.add(new TokenNameFinderTrainerTool());
    tools.add(new TokenNameFinderEvaluatorTool());
    tools.add(new TokenNameFinderCrossValidatorTool());
    tools.add(new TokenNameFinderConverterTool());
    tools.add(new CensusDictionaryCreatorTool());

    // POS Tagger
    tools.add(new opennlp.tools.cmdline.postag.POSTaggerTool());
    tools.add(new POSTaggerTrainerTool());
    tools.add(new POSTaggerEvaluatorTool());
    tools.add(new POSTaggerCrossValidatorTool());
    tools.add(new POSTaggerConverterTool());

    //Lemmatizer
    tools.add(new LemmatizerMETool());
    tools.add(new LemmatizerTrainerTool());
    tools.add(new LemmatizerEvaluatorTool());

    // Chunker
    tools.add(new ChunkerMETool());
    tools.add(new ChunkerTrainerTool());
    tools.add(new ChunkerEvaluatorTool());
    tools.add(new ChunkerCrossValidatorTool());
    tools.add(new ChunkerConverterTool());

    // Parser
    tools.add(new ParserTool());
    tools.add(new ParserTrainerTool()); // trains everything
    tools.add(new ParserEvaluatorTool());
    tools.add(new ParserConverterTool()); // trains everything
    tools.add(new BuildModelUpdaterTool()); // re-trains build model
    tools.add(new CheckModelUpdaterTool()); // re-trains build model
    tools.add(new TaggerModelReplacerTool());

    // Entity Linker
    tools.add(new EntityLinkerTool());

    // Language Model
    tools.add(new NGramLanguageModelTool());

    for (CmdLineTool tool : tools) {
      toolLookupMap.put(tool.getName(), tool);
    }

    toolLookupMap = Collections.unmodifiableMap(toolLookupMap);
  }

  /**
   * @return a set which contains all tool names
   */
  public static Set<String> getToolNames() {
    return toolLookupMap.keySet();
  }

  /**
   * @return a read only map with tool names and instances
   */
  public static Map<String, CmdLineTool> getToolLookupMap() {
    return toolLookupMap;
  }

  /**
   * Prints the version banner and an aligned list of all registered tools with
   * their short descriptions.
   */
  private static void usage() {
    System.out.print("OpenNLP " + Version.currentVersion().toString() + ". ");
    System.out.println("Usage: " + CMD + " TOOL");
    System.out.println("where TOOL is one of:");

    // distance of tool name from line start
    int numberOfSpaces = -1;
    for (String toolName : toolLookupMap.keySet()) {
      if (toolName.length() > numberOfSpaces) {
        numberOfSpaces = toolName.length();
      }
    }
    numberOfSpaces = numberOfSpaces + 4;

    for (CmdLineTool tool : toolLookupMap.values()) {

      System.out.print("  " + tool.getName());

      // pad so every description starts in the same column
      for (int i = 0; i < Math.abs(tool.getName().length() - numberOfSpaces); i++) {
        System.out.print(" ");
      }

      System.out.println(tool.getShortDescription());
    }

    System.out.println("All tools print help when invoked with help parameter");
    System.out.println("Example: opennlp SimpleTokenizer help");
  }

  /**
   * Entry point. The first argument selects the tool — optionally suffixed
   * with ".format" for typed tools — and the remaining arguments are passed
   * through to the selected tool. Exits with the code carried by a
   * {@link TerminateToolException} on failure.
   *
   * @param args command line arguments
   */
  public static void main(String[] args) {

    if (args.length == 0) {
      usage();
      System.exit(0);
    }

    final long startTime = System.currentTimeMillis();

    String[] toolArguments = new String[args.length - 1];
    System.arraycopy(args, 1, toolArguments, 0, toolArguments.length);

    String toolName = args[0];

    //check for format
    String formatName = StreamFactoryRegistry.DEFAULT_FORMAT;
    int idx = toolName.indexOf(".");
    if (-1 < idx) {
      formatName = toolName.substring(idx + 1);
      toolName = toolName.substring(0, idx);
    }
    CmdLineTool tool = toolLookupMap.get(toolName);

    try {
      if (null == tool) {
        throw new TerminateToolException(1, "Tool " + toolName + " is not found.");
      }

      // print help when no arguments were given for a tool that needs some,
      // or when "help" was requested explicitly
      if ((0 == toolArguments.length && tool.hasParams()) ||
          0 < toolArguments.length && "help".equals(toolArguments[0])) {
        if (tool instanceof TypedCmdLineTool) {
          System.out.println(((TypedCmdLineTool<?>) tool).getHelp(formatName));
        } else if (tool instanceof BasicCmdLineTool) {
          System.out.println(tool.getHelp());
        }

        System.exit(0);
      }

      if (tool instanceof TypedCmdLineTool) {
        ((TypedCmdLineTool<?>) tool).run(formatName, toolArguments);
      } else if (tool instanceof BasicCmdLineTool) {
        // basic tools accept no ".format" suffix
        if (-1 == idx) {
          ((BasicCmdLineTool) tool).run(toolArguments);
        } else {
          throw new TerminateToolException(1, "Tool " + toolName + " does not support formats.");
        }
      } else {
        throw new TerminateToolException(1, "Tool " + toolName + " is not supported.");
      }
    }
    catch (TerminateToolException e) {

      if (e.getMessage() != null) {
        System.err.println(e.getMessage());
      }

      if (e.getCause() != null) {
        System.err.println(e.getCause().getMessage());
        e.getCause().printStackTrace(System.err);
      }

      System.exit(e.getCode());
    }

    final long endTime = System.currentTimeMillis();
    System.err.format("Execution time: %.3f seconds\n", (endTime - startTime) / 1000.0);
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/CmdLineTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline;
/**
* Base class for all command line tools.
*/
public abstract class CmdLineTool {

  protected CmdLineTool() {
  }

  /**
   * Retrieves the name of the training data tool. The name (used as command)
   * must not contain white spaces.
   *
   * @return the name of the command line tool
   */
  public String getName() {
    // Derive the command name from the class name, stripping a trailing
    // "Tool" suffix (e.g. ChunkerMETool -> ChunkerME). Use the simple name
    // for both the check and the substring so the two cannot disagree (the
    // original checked the binary name but cut the simple name).
    String simpleName = getClass().getSimpleName();
    if (simpleName.endsWith("Tool")) {
      return simpleName.substring(0, simpleName.length() - 4);
    } else {
      return simpleName;
    }
  }

  /**
   * Returns whether the tool has any command line params.
   * @return whether the tool has any command line params
   */
  public boolean hasParams() {
    return true;
  }

  /**
   * Builds the usage line for a single argument-proxy interface.
   *
   * @param argProxyInterface interface with parameters description
   * @return the usage string
   */
  protected String getBasicHelp(Class<?> argProxyInterface) {
    return getBasicHelp(new Class<?>[] {argProxyInterface});
  }

  /**
   * Builds the usage line for the given argument-proxy interfaces.
   *
   * @param argProxyInterfaces interfaces with parameters description
   * @return the usage string
   */
  protected String getBasicHelp(Class<?>... argProxyInterfaces) {
    return "Usage: " + CLI.CMD + " " + getName() + " " +
        ArgumentParser.createUsage(argProxyInterfaces);
  }

  /**
   * Retrieves a description on how to use the tool.
   *
   * @return a description on how to use the tool
   */
  public abstract String getHelp();

  /**
   * Validates the given arguments against the proxy interface and parses them.
   *
   * @param args command line arguments
   * @param argProxyInterface interface with parameters description
   * @param <T> the proxy interface type
   * @return the parsed parameters
   * @throws TerminateToolException if validation fails; the message includes
   *         the validation error followed by this tool's help text
   */
  protected <T> T validateAndParseParams(String[] args, Class<T> argProxyInterface) {
    String errorMessage = ArgumentParser.validateArgumentsLoudly(args, argProxyInterface);
    if (null != errorMessage) {
      throw new TerminateToolException(1, errorMessage + "\n" + getHelp());
    }
    return ArgumentParser.parse(args, argProxyInterface);
  }

  /**
   * Retrieves a short description of what the tool does.
   *
   * @return a short description of what the tool does
   */
  public String getShortDescription() {
    return "";
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/CmdLineUtil.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import opennlp.tools.ml.TrainerFactory;
import opennlp.tools.util.InputStreamFactory;
import opennlp.tools.util.MarkableFileInputStreamFactory;
import opennlp.tools.util.TrainingParameters;
import opennlp.tools.util.model.BaseModel;
/**
* Util class for the command line interface.
* <p>
* <b>Note:</b> Do not use this class, internal use only!
*/
public final class CmdLineUtil {

  // 1 MiB buffer used when serializing models to disk
  static final int IO_BUFFER_SIZE = 1024 * 1024;

  private CmdLineUtil() {
    // not intended to be instantiated
  }

  /**
   * Check that the given input file is valid.
   * <p>
   * To pass the test it must:<br>
   * - exist<br>
   * - not be a directory<br>
   * - be accessible<br>
   *
   * @param name the name which is used to refer to the file in an error message, it
   * should start with a capital letter.
   *
   * @param inFile the particular file to check to qualify an input file
   *
   * @throws TerminateToolException if test does not pass this exception is
   * thrown and an error message is printed to the console.
   */
  public static void checkInputFile(String name, File inFile) {

    String isFailure = null;

    if (inFile.isDirectory()) {
      isFailure = "The " + name + " file is a directory!";
    }
    else if (!inFile.exists()) {
      isFailure = "The " + name + " file does not exist!";
    }
    else if (!inFile.canRead()) {
      isFailure = "No permissions to read the " + name + " file!";
    }

    if (null != isFailure) {
      throw new TerminateToolException(-1, isFailure + " Path: " + inFile.getAbsolutePath());
    }
  }

  /**
   * Tries to ensure that it is possible to write to an output file.
   * <p>
   * The method does nothing if it is possible to write otherwise
   * it prints an appropriate error message and a {@link TerminateToolException} is thrown.
   * <p>
   * Computing the contents of an output file (e.g. ME model) can be very time consuming.
   * Prior to this computation it should be checked once that writing this output file is
   * possible to be able to fail fast if not. If this validation is only done after a time
   * consuming computation it could frustrate the user.
   *
   * @param name human-friendly file name. for example perceptron model
   * @param outFile file
   */
  public static void checkOutputFile(String name, File outFile) {

    String isFailure = null;

    if (outFile.exists()) {

      // The file already exists, ensure that it is a normal file and that it is
      // possible to write into it
      if (outFile.isDirectory()) {
        isFailure = "The " + name + " file is a directory!";
      }
      else if (outFile.isFile()) {
        if (!outFile.canWrite()) {
          isFailure = "No permissions to write the " + name + " file!";
        }
      }
      else {
        isFailure = "The " + name + " file is not a normal file!";
      }
    }
    else {

      // The file does not exist ensure its parent
      // directory exists and has write permissions to create
      // a new file in it
      File parentDir = outFile.getAbsoluteFile().getParentFile();

      if (parentDir != null && parentDir.exists()) {

        if (!parentDir.canWrite()) {
          isFailure = "No permissions to create the " + name + " file!";
        }
      }
      else {
        isFailure = "The parent directory of the " + name + " file does not exist, " +
            "please create it first!";
      }

    }

    if (null != isFailure) {
      throw new TerminateToolException(-1, isFailure + " Path: " + outFile.getAbsolutePath());
    }
  }

  /**
   * Opens the given file for reading.
   *
   * @param file the file to open
   * @return an open input stream on the file
   * @throws TerminateToolException if the file cannot be found
   */
  public static FileInputStream openInFile(File file) {
    try {
      return new FileInputStream(file);
    } catch (FileNotFoundException e) {
      throw new TerminateToolException(-1, "File '" + file + "' cannot be found", e);
    }
  }

  /**
   * Creates a markable {@link InputStreamFactory} for the given file.
   *
   * @param file the file to wrap
   * @return the stream factory
   * @throws TerminateToolException if the file cannot be found
   */
  public static InputStreamFactory createInputStreamFactory(File file) {
    try {
      return new MarkableFileInputStreamFactory(file);
    } catch (FileNotFoundException e) {
      throw new TerminateToolException(-1, "File '" + file + "' cannot be found", e);
    }
  }

  /**
   * Writes a {@link BaseModel} to disk. Occurring errors are printed to the console
   * to inform the user.
   *
   * @param modelName type of the model, name is used in error messages.
   * @param modelFile output file of the model
   * @param model the model itself which should be written to disk
   */
  public static void writeModel(String modelName, File modelFile, BaseModel model) {

    // fail fast before the (potentially slow) serialization starts
    CmdLineUtil.checkOutputFile(modelName + " model", modelFile);

    System.err.print("Writing " + modelName + " model ... ");

    long beginModelWritingTime = System.currentTimeMillis();

    // try-with-resources closes the buffered stream even on failure
    try (OutputStream modelOut = new BufferedOutputStream(
        new FileOutputStream(modelFile), IO_BUFFER_SIZE)) {
      model.serialize(modelOut);
    } catch (IOException e) {
      System.err.println("failed");
      throw new TerminateToolException(-1, "Error during writing model file '" + modelFile + "'", e);
    }

    long modelWritingDuration = System.currentTimeMillis() - beginModelWritingTime;

    System.err.printf("done (%.3fs)\n", modelWritingDuration / 1000d);

    System.err.println();

    System.err.println("Wrote " + modelName + " model to");
    System.err.println("path: " + modelFile.getAbsolutePath());

    System.err.println();
  }

  /**
   * Returns the index of the parameter in the arguments, or -1 if the parameter is not found.
   * Note: only arguments starting with "-" can match, so param is expected to
   * carry its leading dash.
   *
   * @param param parameter name
   * @param args arguments
   * @return the index of the parameter in the arguments, or -1 if the parameter is not found
   */
  public static int getParameterIndex(String param, String[] args) {
    for (int i = 0; i < args.length; i++) {
      if (args[i].startsWith("-") && args[i].equals(param)) {
        return i;
      }
    }

    return -1;
  }

  /**
   * Retrieves the specified parameter from the given arguments. The value is
   * the argument immediately following the flag.
   *
   * @param param parameter name
   * @param args arguments
   * @return parameter value, or null if the flag is absent or has no value
   */
  public static String getParameter(String param, String[] args) {
    int i = getParameterIndex(param, args);
    if (-1 < i) {
      i++;
      if (i < args.length) {
        return args[i];
      }
    }

    return null;
  }

  /**
   * Retrieves the specified parameter from the specified arguments.
   *
   * @param param parameter name
   * @param args arguments
   * @return parameter value, or null if absent or not a valid integer
   */
  public static Integer getIntParameter(String param, String[] args) {
    String value = getParameter(param, args);

    try {
      if (value != null)
        return Integer.parseInt(value);
    }
    catch (NumberFormatException ignored) {
      // in this case return null
    }

    return null;
  }

  /**
   * Retrieves the specified parameter from the specified arguments.
   *
   * @param param parameter name
   * @param args arguments
   * @return parameter value, or null if absent or not a valid double
   */
  public static Double getDoubleParameter(String param, String[] args) {
    String value = getParameter(param, args);

    try {
      if (value != null)
        return Double.parseDouble(value);
    }
    catch (NumberFormatException ignored) {
      // in this case return null
    }

    return null;
  }

  /**
   * Validates that the given code is an ISO 639 language code (or the special
   * value "x-unspecified").
   *
   * @param code the language code to check
   * @throws TerminateToolException if the code is unknown
   */
  public static void checkLanguageCode(String code) {
    List<String> languageCodes = new ArrayList<>();
    languageCodes.addAll(Arrays.asList(Locale.getISOLanguages()));
    languageCodes.add("x-unspecified");

    if (!languageCodes.contains(code)) {
      throw new TerminateToolException(1, "Unknown language code " + code + ", " +
          "must be an ISO 639 code!");
    }
  }

  /**
   * Tests whether the given parameter occurs in the arguments. Unlike
   * {@link #getParameterIndex(String, String[])} this matches any argument,
   * dash-prefixed or not.
   *
   * @param param parameter name
   * @param args arguments
   * @return true if the parameter is present
   */
  public static boolean containsParam(String param, String[] args) {
    for (String arg : args) {
      if (arg.equals(param)) {
        return true;
      }
    }

    return false;
  }

  /**
   * Rethrows an I/O failure on stdin as a {@link TerminateToolException}.
   *
   * @param e the original I/O error
   */
  public static void handleStdinIoError(IOException e) {
    throw new TerminateToolException(-1, "IO Error while reading from stdin: " + e.getMessage(), e);
  }

  /**
   * Wraps an I/O failure during stream creation into a {@link TerminateToolException}.
   *
   * @param e the original I/O error
   * @return the wrapping exception (never null)
   */
  public static TerminateToolException createObjectStreamError(IOException e) {
    return new TerminateToolException(-1, "IO Error while creating an Input Stream: " + e.getMessage(), e);
  }

  /**
   * Throws the exception produced by {@link #createObjectStreamError(IOException)}.
   *
   * @param e the original I/O error
   */
  public static void handleCreateObjectStreamError(IOException e) {
    throw createObjectStreamError(e);
  }

  // its optional, passing null is allowed
  /**
   * Loads and validates training parameters from the given file.
   *
   * @param paramFile path to the parameters file; may be null, in which case
   *                  null is returned
   * @param supportSequenceTraining whether the caller supports sequence training
   * @return the loaded parameters, or null if paramFile was null
   * @throws TerminateToolException if the file cannot be read, is invalid, or
   *         requires unsupported sequence training
   */
  public static TrainingParameters loadTrainingParameters(String paramFile,
      boolean supportSequenceTraining) {

    TrainingParameters params = null;

    if (paramFile != null) {

      checkInputFile("Training Parameter", new File(paramFile));

      try (InputStream paramsIn = new FileInputStream(new File(paramFile))) {
        params = new opennlp.tools.util.TrainingParameters(paramsIn);
      } catch (IOException e) {
        throw new TerminateToolException(-1, "Error during parameters loading: " + e.getMessage(), e);
      }

      if (!TrainerFactory.isValid(params)) {
        throw new TerminateToolException(1, "Training parameters file '" + paramFile + "' is invalid!");
      }

      TrainerFactory.TrainerType trainerType = TrainerFactory.getTrainerType(params);
      if (!supportSequenceTraining
          && trainerType.equals(TrainerFactory.TrainerType.EVENT_MODEL_SEQUENCE_TRAINER)) {
        throw new TerminateToolException(1, "Sequence training is not supported!");
      }

    }

    return params;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/DetailedFMeasureListener.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import opennlp.tools.util.Span;
import opennlp.tools.util.eval.EvaluationMonitor;
/**
* This listener will gather detailed information about the sample under evaluation and will
* allow detailed FMeasure for each outcome.
* <p>
* <b>Note:</b> Do not use this class, internal use only!
*/
public abstract class DetailedFMeasureListener<T> implements
EvaluationMonitor<T> {
private int samples = 0;
private Stats generalStats = new Stats();
private Map<String, Stats> statsForOutcome = new HashMap<>();
protected abstract Span[] asSpanArray(T sample);
public void correctlyClassified(T reference, T prediction) {
samples++;
// add all true positives!
Span[] spans = asSpanArray(reference);
for (Span span : spans) {
addTruePositive(span.getType());
}
}
public void missclassified(T reference, T prediction) {
samples++;
Span[] references = asSpanArray(reference);
Span[] predictions = asSpanArray(prediction);
Set<Span> refSet = new HashSet<>(Arrays.asList(references));
Set<Span> predSet = new HashSet<>(Arrays.asList(predictions));
for (Span ref : refSet) {
if (predSet.contains(ref)) {
addTruePositive(ref.getType());
} else {
addFalseNegative(ref.getType());
}
}
for (Span pred : predSet) {
if (!refSet.contains(pred)) {
addFalsePositive(pred.getType());
}
}
}
private void addTruePositive(String type) {
Stats s = initStatsForOutcomeAndGet(type);
s.incrementTruePositive();
s.incrementTarget();
generalStats.incrementTruePositive();
generalStats.incrementTarget();
}
private void addFalsePositive(String type) {
Stats s = initStatsForOutcomeAndGet(type);
s.incrementFalsePositive();
generalStats.incrementFalsePositive();
}
private void addFalseNegative(String type) {
Stats s = initStatsForOutcomeAndGet(type);
s.incrementTarget();
generalStats.incrementTarget();
}
private Stats initStatsForOutcomeAndGet(String type) {
if (!statsForOutcome.containsKey(type)) {
statsForOutcome.put(type, new Stats());
}
return statsForOutcome.get(type);
}
private static final String PERCENT = "%\u00207.2f%%";
private static final String FORMAT = "%12s: precision: " + PERCENT
+ "; recall: " + PERCENT + "; F1: " + PERCENT + ".";
private static final String FORMAT_EXTRA = FORMAT
+ " [target: %3d; tp: %3d; fp: %3d]";
public String createReport() {
return createReport(Locale.getDefault());
}
public String createReport(Locale locale) {
StringBuilder ret = new StringBuilder();
int tp = generalStats.getTruePositives();
int found = generalStats.getFalsePositives() + tp;
ret.append("Evaluated ").append(samples).append(" samples with ")
.append(generalStats.getTarget()).append(" entities; found: ")
.append(found).append(" entities; correct: ").append(tp).append(".\n");
ret.append(String.format(locale, FORMAT, "TOTAL",
zeroOrPositive(generalStats.getPrecisionScore() * 100),
zeroOrPositive(generalStats.getRecallScore() * 100),
zeroOrPositive(generalStats.getFMeasure() * 100)));
ret.append("\n");
SortedSet<String> set = new TreeSet<>(new F1Comparator());
set.addAll(statsForOutcome.keySet());
for (String type : set) {
ret.append(String.format(locale, FORMAT_EXTRA, type,
zeroOrPositive(statsForOutcome.get(type).getPrecisionScore() * 100),
zeroOrPositive(statsForOutcome.get(type).getRecallScore() * 100),
zeroOrPositive(statsForOutcome.get(type).getFMeasure() * 100),
statsForOutcome.get(type).getTarget(), statsForOutcome.get(type)
.getTruePositives(), statsForOutcome.get(type)
.getFalsePositives()));
ret.append("\n");
}
return ret.toString();
}
@Override
public String toString() {
return createReport();
}
private double zeroOrPositive(double v) {
if (v < 0) {
return 0;
}
return v;
}
private class F1Comparator implements Comparator<String> {
public int compare(String o1, String o2) {
if (o1.equals(o2))
return 0;
double t1 = 0;
double t2 = 0;
if (statsForOutcome.containsKey(o1))
t1 += statsForOutcome.get(o1).getFMeasure();
if (statsForOutcome.containsKey(o2))
t2 += statsForOutcome.get(o2).getFMeasure();
t1 = zeroOrPositive(t1);
t2 = zeroOrPositive(t2);
if (t1 + t2 > 0d) {
if (t1 > t2)
return -1;
return 1;
}
return o1.compareTo(o2);
}
}
/**
 * Store the statistics.
 */
private class Stats {

  // FMeasure could be reused here, but it does not expose the raw
  // true/false positive counts that the report needs.
  private int falsePositiveCount = 0;
  private int truePositiveCount = 0;
  private int targetCount = 0;

  /** Counts one entity that was found but is not in the reference. */
  public void incrementFalsePositive() {
    falsePositiveCount++;
  }

  /** Counts one entity that was found and matches the reference. */
  public void incrementTruePositive() {
    truePositiveCount++;
  }

  /** Counts one entity present in the reference. */
  public void incrementTarget() {
    targetCount++;
  }

  public int getFalsePositives() {
    return falsePositiveCount;
  }

  public int getTruePositives() {
    return truePositiveCount;
  }

  public int getTarget() {
    return targetCount;
  }

  /**
   * Precision: true positives over everything that was selected (true
   * plus false positives).
   *
   * @return the precision, or 0 when nothing was selected
   */
  public double getPrecisionScore() {
    int selected = getTruePositives() + getFalsePositives();
    return selected > 0 ? (double) getTruePositives() / (double) selected : 0;
  }

  /**
   * Recall: true positives over the number of reference entities.
   *
   * @return the recall, or 0 when there were no reference entities
   */
  public double getRecallScore() {
    int target = getTarget();
    return target > 0 ? (double) getTruePositives() / (double) target : 0;
  }

  /**
   * Retrieves the f-measure score.
   *
   * f-measure = 2 * precision * recall / (precision + recall)
   *
   * @return the f-measure or -1 if precision + recall <= 0
   */
  public double getFMeasure() {
    double precision = getPrecisionScore();
    double recall = getRecallScore();
    if (precision + recall <= 0) {
      // cannot divide by zero, return error code
      return -1;
    }
    return 2 * (precision * recall) / (precision + recall);
  }
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/EvaluationErrorPrinter.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline;
import java.io.OutputStream;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import opennlp.tools.util.Span;
import opennlp.tools.util.eval.EvaluationMonitor;
/**
* <b>Note:</b> Do not use this class, internal use only!
*/
/**
 * <b>Note:</b> Do not use this class, internal use only!
 * <p>
 * Base class for {@link EvaluationMonitor} implementations that print the
 * reference sample, the predicted sample and the differing spans or tags
 * whenever a prediction disagrees with its reference.
 */
public abstract class EvaluationErrorPrinter<T> implements EvaluationMonitor<T> {

  // stream every error report is written to
  protected PrintStream printStream;

  protected EvaluationErrorPrinter(OutputStream outputStream) {
    this.printStream = new PrintStream(outputStream);
  }

  // for the sentence detector
  /**
   * Prints the samples and their span differences, rendering each span
   * against the sentence text. Prints nothing when the spans match exactly.
   */
  protected void printError(Span[] references, Span[] predictions,
      T referenceSample, T predictedSample, String sentence) {
    List<Span> falseNegatives = new ArrayList<>();
    List<Span> falsePositives = new ArrayList<>();
    findErrors(references, predictions, falseNegatives, falsePositives);
    if (falsePositives.size() + falseNegatives.size() > 0) {
      printSamples(referenceSample, predictedSample);
      printErrors(falsePositives, falseNegatives, sentence);
    }
  }

  // for namefinder, chunker...
  /**
   * Prints the sample id (when non-null), the samples and their span
   * differences, rendering spans against the sentence tokens. Prints
   * nothing when the spans match exactly.
   */
  protected void printError(String id, Span[] references, Span[] predictions,
      T referenceSample, T predictedSample, String[] sentenceTokens) {
    List<Span> falseNegatives = new ArrayList<>();
    List<Span> falsePositives = new ArrayList<>();
    findErrors(references, predictions, falseNegatives, falsePositives);
    if (falsePositives.size() + falseNegatives.size() > 0) {
      if (id != null) {
        printStream.println("Id: {" + id + "}");
      }
      printSamples(referenceSample, predictedSample);
      printErrors(falsePositives, falseNegatives, sentenceTokens);
    }
  }

  /** Same as the id-taking variant, with no sample id. */
  protected void printError(Span[] references, Span[] predictions,
      T referenceSample, T predictedSample, String[] sentenceTokens) {
    printError(null, references, predictions, referenceSample, predictedSample, sentenceTokens);
  }

  // for pos tagger
  /**
   * Prints the samples and a token-level table of the positions where the
   * reference tag differs from the predicted tag. Prints nothing when all
   * tags agree. Assumes the three arrays are index-aligned.
   */
  protected void printError(String[] references, String[] predictions,
      T referenceSample, T predictedSample, String[] sentenceTokens) {
    List<String> filteredDoc = new ArrayList<>();
    List<String> filteredRefs = new ArrayList<>();
    List<String> filteredPreds = new ArrayList<>();
    for (int i = 0; i < references.length; i++) {
      if (!references[i].equals(predictions[i])) {
        filteredDoc.add(sentenceTokens[i]);
        filteredRefs.add(references[i]);
        filteredPreds.add(predictions[i]);
      }
    }
    if (filteredDoc.size() > 0) {
      printSamples(referenceSample, predictedSample);
      printErrors(filteredDoc, filteredRefs, filteredPreds);
    }
  }

  // for others
  /** Prints only the expected and predicted samples, with no diff details. */
  protected void printError(T referenceSample, T predictedSample) {
    printSamples(referenceSample, predictedSample);
    printStream.println();
  }

  /**
   * Auxiliary method to print tag errors
   *
   * @param filteredDoc
   *          the document tokens which were tagged wrong
   * @param filteredRefs
   *          the reference tags
   * @param filteredPreds
   *          the predicted tags
   */
  private void printErrors(List<String> filteredDoc, List<String> filteredRefs,
      List<String> filteredPreds) {
    printStream.println("Errors: {");
    printStream.println("Tok: Ref | Pred");
    printStream.println("---------------");
    for (int i = 0; i < filteredDoc.size(); i++) {
      printStream.println(filteredDoc.get(i) + ": " + filteredRefs.get(i)
          + " | " + filteredPreds.get(i));
    }
    printStream.println("}\n");
  }

  /**
   * Auxiliary method to print span errors
   *
   * @param falsePositives
   *          false positives span
   * @param falseNegatives
   *          false negative span
   * @param doc
   *          the document text
   */
  private void printErrors(List<Span> falsePositives,
      List<Span> falseNegatives, String doc) {
    printStream.println("False positives: {");
    for (Span span : falsePositives) {
      printStream.println(span.getCoveredText(doc));
    }
    printStream.println("} False negatives: {");
    for (Span span : falseNegatives) {
      printStream.println(span.getCoveredText(doc));
    }
    printStream.println("}\n");
  }

  /**
   * Auxiliary method to print span errors
   *
   * @param falsePositives
   *          false positives span
   * @param falseNegatives
   *          false negative span
   * @param toks
   *          the document tokens
   */
  private void printErrors(List<Span> falsePositives,
      List<Span> falseNegatives, String[] toks) {
    printStream.println("False positives: {");
    printStream.println(print(falsePositives, toks));
    printStream.println("} False negatives: {");
    printStream.println(print(falseNegatives, toks));
    printStream.println("}\n");
  }

  /**
   * Auxiliary method to print spans
   *
   * @param spans
   *          the span list
   * @param toks
   *          the tokens array
   * @return the spans as string
   */
  private String print(List<Span> spans, String[] toks) {
    return Arrays.toString(Span.spansToStrings(
        spans.toArray(new Span[spans.size()]), toks));
  }

  /**
   * Auxiliary method to print expected and predicted samples.
   *
   * @param referenceSample
   *          the reference sample
   * @param predictedSample
   *          the predicted sample
   */
  private <S> void printSamples(S referenceSample, S predictedSample) {
    String details = "Expected: {\n" + referenceSample + "}\nPredicted: {\n"
        + predictedSample + "}";
    printStream.println(details);
  }

  /**
   * Outputs falseNegatives and falsePositives spans from the references and
   * predictions list.
   *
   * @param references
   * @param predictions
   * @param falseNegatives
   *          [out] the false negatives list
   * @param falsePositives
   *          [out] the false positives list
   */
  private void findErrors(Span[] references, Span[] predictions,
      List<Span> falseNegatives, List<Span> falsePositives) {
    // start with everything and remove each exact match from both lists;
    // quadratic scan - presumably fine for per-sample span counts
    falseNegatives.addAll(Arrays.asList(references));
    falsePositives.addAll(Arrays.asList(predictions));
    for (int referenceIndex = 0; referenceIndex < references.length; referenceIndex++) {
      Span referenceName = references[referenceIndex];
      for (int predictedIndex = 0; predictedIndex < predictions.length; predictedIndex++) {
        if (referenceName.equals(predictions[predictedIndex])) {
          // got it, remove from fn and fp
          falseNegatives.remove(referenceName);
          falsePositives.remove(predictions[predictedIndex]);
        }
      }
    }
  }

  /** Correct classifications are not reported. */
  public void correctlyClassified(T reference, T prediction) {
    // do nothing
  }

  // NOTE(review): "missclassified" (double s) is the established API name;
  // renaming it would break subclasses.
  public abstract void missclassified(T reference, T prediction) ;
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/FineGrainedReportListener.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline;
import java.io.OutputStream;
import java.io.PrintStream;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import opennlp.tools.util.Span;
import opennlp.tools.util.eval.FMeasure;
import opennlp.tools.util.eval.Mean;
public abstract class FineGrainedReportListener {
private static final char[] alpha = { 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',
'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v',
'w', 'x', 'y', 'z' };
private final PrintStream printStream;
private final Stats stats = new Stats();
/**
 * Creates a listener that writes its report to the given {@link PrintStream}.
 *
 * @param printStream the stream the report is written to
 */
public FineGrainedReportListener(PrintStream printStream) {
  this.printStream = printStream;
}
/**
 * Creates a listener that writes its report to the given
 * {@link OutputStream}.
 * <p>
 * (NOTE(review): the javadoc previously attached here — "Writes the report
 * to the OutputStream. Should be called only after the evaluation process"
 * — describes {@code writeReport()}, not this constructor.)
 *
 * @param outputStream the stream the report is written to
 */
public FineGrainedReportListener(OutputStream outputStream) {
  this.printStream = new PrintStream(outputStream);
}
/**
 * Builds a three-character, right-aligned alphabetic column label for the
 * given zero-based index (a, b, ..., z, aa, ab, ...), padding unused
 * leading positions with spaces.
 */
private static String generateAlphaLabel(int index) {
  char[] label = new char[3];
  for (int pos = 2; pos >= 0; pos--) {
    if (index >= 0) {
      label[pos] = alpha[index % alpha.length];
      index = index / alpha.length - 1;
    } else {
      label[pos] = ' ';
    }
  }
  return new String(label);
}
/**
 * Writes the report to the configured stream. Should be called only after
 * the evaluation process.
 */
public abstract void writeReport();

// api methods
// general stats

/** Gives subclasses access to the accumulated evaluation statistics. */
protected Stats getStats() {
  return this.stats;
}

// The private accessors below are thin delegates to Stats, used by the
// report-printing methods in this class.

private long getNumberOfSentences() {
  return stats.getNumberOfSentences();
}

private double getAverageSentenceSize() {
  return stats.getAverageSentenceSize();
}

private int getMinSentenceSize() {
  return stats.getMinSentenceSize();
}

private int getMaxSentenceSize() {
  return stats.getMaxSentenceSize();
}

private int getNumberOfTags() {
  return stats.getNumberOfTags();
}

// token stats

private double getAccuracy() {
  return stats.getAccuracy();
}

private double getTokenAccuracy(String token) {
  return stats.getTokenAccuracy(token);
}

private SortedSet<String> getTokensOrderedByFrequency() {
  return stats.getTokensOrderedByFrequency();
}

private int getTokenFrequency(String token) {
  return stats.getTokenFrequency(token);
}

private int getTokenErrors(String token) {
  return stats.getTokenErrors(token);
}

private SortedSet<String> getTokensOrderedByNumberOfErrors() {
  return stats.getTokensOrderedByNumberOfErrors();
}

private SortedSet<String> getTagsOrderedByErrors() {
  return stats.getTagsOrderedByErrors();
}

private int getTagFrequency(String tag) {
  return stats.getTagFrequency(tag);
}

private int getTagErrors(String tag) {
  return stats.getTagErrors(tag);
}

private double getTagPrecision(String tag) {
  return stats.getTagPrecision(tag);
}

private double getTagRecall(String tag) {
  return stats.getTagRecall(tag);
}

private double getTagFMeasure(String tag) {
  return stats.getTagFMeasure(tag);
}

private SortedSet<String> getConfusionMatrixTagset() {
  return stats.getConfusionMatrixTagset();
}

private SortedSet<String> getConfusionMatrixTagset(String token) {
  return stats.getConfusionMatrixTagset(token);
}

private double[][] getConfusionMatrix() {
  return stats.getConfusionMatrix();
}

private double[][] getConfusionMatrix(String token) {
  return stats.getConfusionMatrix(token);
}
/**
 * Renders a confusion matrix as text: one column per tag (labelled a, b,
 * c, ...), a trailing accuracy column and a "classified as" legend per row.
 *
 * @param tagset the tags, in the same order as the rows of {@code data}
 * @param data   an N x (N+1) matrix whose last column is the row accuracy
 * @param filter when true, rows whose accuracy equals 1 are skipped;
 *               this assumes the rows are pre-sorted by accuracy (see
 *               {@link MatrixLabelComparator}) so the trivial rows lead
 * @return the formatted matrix
 */
private String matrixToString(SortedSet<String> tagset, double[][] data,
    boolean filter) {
  // we dont want to print trivial cases (acc=1)
  int initialIndex = 0;
  String[] tags = tagset.toArray(new String[tagset.size()]);
  StringBuilder sb = new StringBuilder();
  int minColumnSize = Integer.MIN_VALUE;
  String[][] matrix = new String[data.length][data[0].length];
  // first pass: stringify counts (zero -> ".") and find the widest cell
  for (int i = 0; i < data.length; i++) {
    int j = 0;
    for (; j < data[i].length - 1; j++) {
      matrix[i][j] = data[i][j] > 0 ? Integer.toString((int) data[i][j])
          : ".";
      if (minColumnSize < matrix[i][j].length()) {
        minColumnSize = matrix[i][j].length();
      }
    }
    // last column: the row accuracy, formatted as a percentage
    matrix[i][j] = MessageFormat.format("{0,number,#.##%}", data[i][j]);
    if (data[i][j] == 1 && filter) {
      initialIndex = i + 1;
    }
  }
  final String headerFormat = "%" + (minColumnSize + 2) + "s "; // | 1234567 |
  final String cellFormat = "%" + (minColumnSize + 2) + "s "; // | 12345 |
  final String diagFormat = " %" + (minColumnSize + 2) + "s";
  // header row of generated column labels
  for (int i = initialIndex; i < tagset.size(); i++) {
    sb.append(String.format(headerFormat,
        generateAlphaLabel(i - initialIndex).trim()));
  }
  sb.append("| Accuracy | <-- classified as\n");
  // body: diagonal cells are wrapped in <> to mark correct classifications
  for (int i = initialIndex; i < data.length; i++) {
    int j = initialIndex;
    for (; j < data[i].length - 1; j++) {
      if (i == j) {
        String val = "<" + matrix[i][j] + ">";
        sb.append(String.format(diagFormat, val));
      } else {
        sb.append(String.format(cellFormat, matrix[i][j]));
      }
    }
    sb.append(
        String.format("| %-6s | %3s = ", matrix[i][j],
            generateAlphaLabel(i - initialIndex))).append(tags[i]);
    sb.append("\n");
  }
  return sb.toString();
}
/**
 * Prints the evaluation summary: sentence counts and sizes, the number of
 * distinct tags and the overall accuracy.
 */
protected void printGeneralStatistics() {
  printHeader("Evaluation summary");
  printStream.append(
      String.format("%21s: %6s", "Number of sentences",
          Long.toString(getNumberOfSentences()))).append("\n");
  printStream.append(
      String.format("%21s: %6s", "Min sentence size", getMinSentenceSize()))
      .append("\n");
  printStream.append(
      String.format("%21s: %6s", "Max sentence size", getMaxSentenceSize()))
      .append("\n");
  printStream.append(
      String.format("%21s: %6s", "Average sentence size",
          MessageFormat.format("{0,number,#.##}", getAverageSentenceSize())))
      .append("\n");
  printStream.append(
      String.format("%21s: %6s", "Tags count", getNumberOfTags())).append(
      "\n");
  printStream.append(
      String.format("%21s: %6s", "Accuracy",
          MessageFormat.format("{0,number,#.##%}", getAccuracy()))).append(
      "\n");
  printFooter("Evaluation Corpus Statistics");
}
/**
 * Prints a table of the up-to-20 most frequent tokens with their counts.
 * The first pass only measures the widest token so the table can be sized;
 * the second pass prints the rows.
 */
protected void printTokenOcurrenciesRank() {
  printHeader("Most frequent tokens");
  SortedSet<String> toks = getTokensOrderedByFrequency();
  final int maxLines = 20;
  int maxTokSize = 5;
  int count = 0;
  // sizing pass: widest token among the first maxLines entries
  Iterator<String> tokIterator = toks.iterator();
  while (tokIterator.hasNext() && count++ < maxLines) {
    String tok = tokIterator.next();
    if (tok.length() > maxTokSize) {
      maxTokSize = tok.length();
    }
  }
  int tableSize = maxTokSize + 19;
  String format = "| %3s | %6s | %" + maxTokSize + "s |";
  printLine(tableSize);
  printStream.append(String.format(format, "Pos", "Count", "Token")).append(
      "\n");
  printLine(tableSize);
  // get the first 20 errors
  count = 0;
  tokIterator = toks.iterator();
  while (tokIterator.hasNext() && count++ < maxLines) {
    String tok = tokIterator.next();
    int ocurrencies = getTokenFrequency(tok);
    printStream.append(String.format(format, count, ocurrencies, tok)
    ).append("\n");
  }
  printLine(tableSize);
  printFooter("Most frequent tokens");
}
/**
 * Prints a table of the up-to-20 tokens with the highest number of tagging
 * errors, including each token's total count and error rate.
 */
protected void printTokenErrorRank() {
  printHeader("Tokens with the highest number of errors");
  printStream.append("\n");
  SortedSet<String> toks = getTokensOrderedByNumberOfErrors();
  int maxTokenSize = 5;
  int count = 0;
  // sizing pass: widest token among the first 20 entries
  Iterator<String> tokIterator = toks.iterator();
  while (tokIterator.hasNext() && count++ < 20) {
    String tok = tokIterator.next();
    if (tok.length() > maxTokenSize) {
      maxTokenSize = tok.length();
    }
  }
  int tableSize = 31 + maxTokenSize;
  String format = "| %" + maxTokenSize + "s | %6s | %5s | %7s |\n";
  printLine(tableSize);
  printStream.append(String.format(format, "Token", "Errors", "Count",
      "% Err"));
  printLine(tableSize);
  // get the first 20 errors
  count = 0;
  tokIterator = toks.iterator();
  while (tokIterator.hasNext() && count++ < 20) {
    String tok = tokIterator.next();
    int ocurrencies = getTokenFrequency(tok);
    int errors = getTokenErrors(tok);
    String rate = MessageFormat.format("{0,number,#.##%}", (double) errors
        / ocurrencies);
    printStream.append(String.format(format, tok, errors, ocurrencies, rate)
    );
  }
  printLine(tableSize);
  printFooter("Tokens with the highest number of errors");
}
/**
 * Prints a per-tag accuracy table: error count, frequency, error rate,
 * precision, recall and F-measure, ordered by number of errors.
 */
protected void printTagsErrorRank() {
  printHeader("Detailed Accuracy By Tag");
  SortedSet<String> tags = getTagsOrderedByErrors();
  printStream.append("\n");
  // sizing pass: widest tag name
  int maxTagSize = 3;
  for (String t : tags) {
    if (t.length() > maxTagSize) {
      maxTagSize = t.length();
    }
  }
  int tableSize = 65 + maxTagSize;
  String headerFormat = "| %" + maxTagSize
      + "s | %6s | %6s | %7s | %9s | %6s | %9s |\n";
  String format = "| %" + maxTagSize
      + "s | %6s | %6s | %-7s | %-9s | %-6s | %-9s |\n";
  printLine(tableSize);
  printStream.append(String.format(headerFormat, "Tag", "Errors", "Count",
      "% Err", "Precision", "Recall", "F-Measure"));
  printLine(tableSize);
  for (String tag : tags) {
    int ocurrencies = getTagFrequency(tag);
    int errors = getTagErrors(tag);
    String rate = MessageFormat.format("{0,number,#.###}", (double) errors
        / ocurrencies);
    double p = getTagPrecision(tag);
    double r = getTagRecall(tag);
    double f = getTagFMeasure(tag);
    // negative scores are "undefined" sentinels; displayed as 0
    printStream.append(String.format(format, tag, errors, ocurrencies, rate,
        MessageFormat.format("{0,number,#.###}", p > 0 ? p : 0),
        MessageFormat.format("{0,number,#.###}", r > 0 ? r : 0),
        MessageFormat.format("{0,number,#.###}", f > 0 ? f : 0))
    );
  }
  printLine(tableSize);
  // NOTE(review): footer text differs from the header
  // ("Detailed Accuracy By Tag") - confirm this is intentional.
  printFooter("Tags with the highest number of errors");
}
/**
 * Prints the aggregated confusion matrix: first a line of tags that were
 * classified with 100% accuracy (with their counts), then the filtered
 * matrix for all remaining tags.
 */
protected void printGeneralConfusionTable() {
  printHeader("Confusion matrix");
  SortedSet<String> labels = getConfusionMatrixTagset();
  double[][] confusionMatrix = getConfusionMatrix();
  printStream.append("\nTags with 100% accuracy: ");
  int line = 0;
  for (String label : labels) {
    // last column of each row holds the row accuracy
    if (confusionMatrix[line][confusionMatrix[0].length - 1] == 1) {
      printStream.append(label).append(" (")
          .append(Integer.toString((int) confusionMatrix[line][line]))
          .append(") ");
    }
    line++;
  }
  printStream.append("\n\n");
  printStream.append(matrixToString(labels, confusionMatrix, true));
  printFooter("Confusion matrix");
}
/**
 * Prints one confusion matrix per token that was ever tagged incorrectly
 * (accuracy below 1), along with the token's accuracy, frequency and error
 * count, ordered by number of errors.
 */
protected void printDetailedConfusionMatrix() {
  printHeader("Confusion matrix for tokens");
  printStream.append("  sorted by number of errors\n");
  SortedSet<String> toks = getTokensOrderedByNumberOfErrors();
  for (String t : toks) {
    double acc = getTokenAccuracy(t);
    if (acc < 1) {
      printStream
          .append("\n[")
          .append(t)
          .append("]\n")
          .append(
              String.format("%12s: %-8s", "Accuracy",
                  MessageFormat.format("{0,number,#.##%}", acc)))
          .append("\n");
      printStream.append(
          String.format("%12s: %-8s", "Ocurrencies",
              Integer.toString(getTokenFrequency(t)))).append("\n");
      printStream.append(
          String.format("%12s: %-8s", "Errors",
              Integer.toString(getTokenErrors(t)))).append("\n");
      SortedSet<String> labels = getConfusionMatrixTagset(t);
      double[][] confusionMatrix = getConfusionMatrix(t);
      printStream.append(matrixToString(labels, confusionMatrix, false));
    }
  }
  printFooter("Confusion matrix for tokens");
}
/** Auxiliary method that prints an emphasised report header. */
private void printHeader(String text) {
  printStream.append("=== " + text + " ===\n");
}
/** Auxiliary method that prints a closing marker for a report section. */
private void printFooter(String text) {
  printStream.append("\n<-end> " + text + "\n\n");
}
/** Auxiliary method that prints a horizontal rule of {@code size} dashes. */
private void printLine(int size) {
  StringBuilder line = new StringBuilder();
  for (int i = 0; i < size; i++) {
    line.append('-');
  }
  printStream.append(line).append("\n");
}
/**
 * A comparator that sorts the confusion matrix labels by descending line
 * accuracy. Ties are broken alphabetically, and labels missing from the
 * matrix sort last, keeping the ordering consistent with equals.
 */
public static class MatrixLabelComparator implements Comparator<String> {

  private final Map<String, ConfusionMatrixLine> confusionMatrix;

  public MatrixLabelComparator(Map<String, ConfusionMatrixLine> confusionMatrix) {
    this.confusionMatrix = confusionMatrix;
  }

  @Override
  public int compare(String o1, String o2) {
    if (o1.equals(o2)) {
      return 0;
    }
    ConfusionMatrixLine t1 = confusionMatrix.get(o1);
    ConfusionMatrixLine t2 = confusionMatrix.get(o2);
    if (t1 == null || t2 == null) {
      return t1 == null ? 1 : -1;
    }
    // Double.compare yields a total order even when an accuracy is NaN;
    // the previous manual comparison (r2 > r1 ? 1 : -1) was asymmetric
    // for NaN, which violates the Comparator contract.
    int byAccuracy = Double.compare(t2.getAccuracy(), t1.getAccuracy());
    if (byAccuracy != 0) {
      return byAccuracy;
    }
    return o1.compareTo(o2);
  }
}
/**
 * Sorts confusion matrix labels so that labels sharing a category prefix
 * (the part before the first '-') stay grouped together. Categories are
 * ordered by their summed accuracy, labels inside a category by their own
 * line accuracy, both descending; ties fall back to the natural String
 * order so the comparator stays consistent with equals.
 */
public static class GroupedMatrixLabelComparator implements Comparator<String> {

  private final HashMap<String, Double> categoryAccuracy;
  private final Map<String, ConfusionMatrixLine> confusionMatrix;

  public GroupedMatrixLabelComparator(Map<String, ConfusionMatrixLine> confusionMatrix) {
    this.confusionMatrix = confusionMatrix;
    this.categoryAccuracy = new HashMap<>();
    // compute grouped categories: sum the accuracy of every label that
    // falls into the same category prefix
    for (Entry<String, ConfusionMatrixLine> entry : confusionMatrix.entrySet()) {
      final String category = categoryOf(entry.getKey());
      double currentAccuracy = categoryAccuracy.getOrDefault(category, 0.0d);
      categoryAccuracy.put(category, currentAccuracy + entry.getValue().getAccuracy());
    }
  }

  /** Returns the category prefix of a label (the part before the first '-'). */
  private static String categoryOf(String label) {
    return label.contains("-") ? label.split("-")[0] : label;
  }

  @Override
  public int compare(String o1, String o2) {
    if (o1.equals(o2)) {
      return 0;
    }
    String c1 = categoryOf(o1);
    String c2 = categoryOf(o2);
    if (c1.equals(c2)) { // same category - sort by line accuracy
      ConfusionMatrixLine t1 = confusionMatrix.get(o1);
      ConfusionMatrixLine t2 = confusionMatrix.get(o2);
      if (t1 == null || t2 == null) {
        return t1 == null ? 1 : -1;
      }
      // Double.compare is NaN-safe, unlike the previous manual comparison
      int byAccuracy = Double.compare(t2.getAccuracy(), t1.getAccuracy());
      return byAccuracy != 0 ? byAccuracy : o1.compareTo(o2);
    } else { // different category - sort by summed category accuracy
      Double t1 = categoryAccuracy.get(c1);
      Double t2 = categoryAccuracy.get(c2);
      if (t1 == null || t2 == null) {
        return t1 == null ? 1 : -1;
      }
      int byCategory = Double.compare(t2, t1);
      return byCategory != 0 ? byCategory : o1.compareTo(o2);
    }
  }
}
/**
 * Returns the comparator used to order confusion matrix labels; subclasses
 * may override to plug in a different ordering.
 */
public Comparator<String> getMatrixLabelComparator(Map<String, ConfusionMatrixLine> confusionMatrix) {
  return new MatrixLabelComparator(confusionMatrix);
}
/**
 * Orders labels by descending counter value; ties fall back to the natural
 * String order, and labels missing from the map count as zero.
 */
public static class SimpleLabelComparator implements Comparator<String> {

  private final Map<String, Counter> map;

  public SimpleLabelComparator(Map<String, Counter> map) {
    this.map = map;
  }

  @Override
  public int compare(String o1, String o2) {
    if (o1.equals(o2)) {
      return 0;
    }
    int e1 = map.containsKey(o1) ? map.get(o1).value() : 0;
    int e2 = map.containsKey(o2) ? map.get(o2).value() : 0;
    if (e1 == e2) {
      return o1.compareTo(o2);
    }
    // Integer.compare cannot overflow, unlike the previous (e2 - e1)
    return Integer.compare(e2, e1);
  }
}
/**
 * Returns the comparator used to order labels by their counters; subclasses
 * may override to plug in a different ordering.
 */
public Comparator<String> getLabelComparator(Map<String, Counter> map) {
  return new SimpleLabelComparator(map);
}
/**
 * Orders labels so that labels sharing a category prefix (the part before
 * the first '-') stay grouped together. Categories are ordered by their
 * summed counter value, labels inside a category by their own counter
 * value, both descending; ties fall back to the natural String order.
 */
public static class GroupedLabelComparator implements Comparator<String> {

  private final HashMap<String, Integer> categoryCounter;
  private final Map<String, Counter> labelCounter;

  public GroupedLabelComparator(Map<String, Counter> map) {
    this.labelCounter = map;
    this.categoryCounter = new HashMap<>();
    // compute grouped categories: sum the counter of every label that
    // falls into the same category prefix
    for (Entry<String, Counter> entry : labelCounter.entrySet()) {
      final String category = categoryOf(entry.getKey());
      int currentCount = categoryCounter.getOrDefault(category, 0);
      categoryCounter.put(category, currentCount + entry.getValue().value());
    }
  }

  /** Returns the category prefix of a label (the part before the first '-'). */
  private static String categoryOf(String label) {
    return label.contains("-") ? label.split("-")[0] : label;
  }

  @Override
  public int compare(String o1, String o2) {
    if (o1.equals(o2)) {
      return 0;
    }
    String c1 = categoryOf(o1);
    String c2 = categoryOf(o2);
    if (c1.equals(c2)) { // same category - sort by label counter
      Counter t1 = labelCounter.get(o1);
      Counter t2 = labelCounter.get(o2);
      if (t1 == null || t2 == null) {
        return t1 == null ? 1 : -1;
      }
      // Integer.compare cannot overflow, unlike manual subtraction
      int byCount = Integer.compare(t2.value(), t1.value());
      return byCount != 0 ? byCount : o1.compareTo(o2);
    } else { // different category - sort by summed category counter
      Integer t1 = categoryCounter.get(c1);
      Integer t2 = categoryCounter.get(c2);
      if (t1 == null || t2 == null) {
        return t1 == null ? 1 : -1;
      }
      int byCategory = Integer.compare(t2, t1);
      return byCategory != 0 ? byCategory : o1.compareTo(o2);
    }
  }
}
/**
 * Represents a line in the confusion table.
 */
public static class ConfusionMatrixLine {

  // per-predicted-tag counters for this reference tag
  private Map<String, Counter> line = new HashMap<>();

  private String ref;

  private int total = 0;

  private int correct = 0;

  // cached accuracy; -1 means "not yet computed"
  private double acc = -1;

  /**
   * Creates a new {@link ConfusionMatrixLine}
   *
   * @param ref
   *          the reference column
   */
  private ConfusionMatrixLine(String ref) {
    this.ref = ref;
  }

  /**
   * Increments the counter for the given column and updates the statistics.
   *
   * @param column
   *          the column to be incremented
   */
  private void increment(String column) {
    total++;
    if (column.equals(ref))
      correct++;
    if (!line.containsKey(column)) {
      line.put(column, new Counter());
    }
    line.get(column).increment();
  }

  /**
   * Gets the calculated accuracy of this element.
   *
   * @return the accuracy, or 0 when the line holds no data
   */
  public double getAccuracy() {
    // we save the accuracy because it is frequently used by the comparator.
    // BUG FIX: the cache check used to be Math.abs(acc - 1.0d) < 1e-10,
    // i.e. "accuracy already exactly 1" - which is never true for the -1
    // sentinel, so the accuracy was never computed and -1 was always
    // returned. Compute whenever the sentinel is still in place.
    if (acc < 0) {
      if (total == 0) {
        acc = 0.0d; // avoid 0/0 = NaN on an empty line
      } else {
        acc = (double) correct / (double) total;
      }
    }
    // NOTE(review): the cache is not invalidated by increment(), matching
    // the original caching design - accuracy is read only after counting.
    return acc;
  }

  /**
   * Gets the value given a column
   *
   * @param column
   *          the column
   * @return the counter value
   */
  public int getValue(String column) {
    Counter c = line.get(column);
    if (c == null)
      return 0;
    return c.value();
  }
}
/**
 * Implements a simple counter
 */
public static class Counter {

  private int count = 0;

  /** Adds one to the counter. */
  private void increment() {
    count++;
  }

  /** Returns the current count. */
  public int value() {
    return count;
  }
}
public class Stats {
// general statistics
private final Mean accuracy = new Mean();
private final Mean averageSentenceLength = new Mean();
// token statistics
private final Map<String, Mean> tokAccuracies = new HashMap<>();
private final Map<String, Counter> tokOcurrencies = new HashMap<>();
private final Map<String, Counter> tokErrors = new HashMap<>();
// tag statistics
private final Map<String, Counter> tagOcurrencies = new HashMap<>();
private final Map<String, Counter> tagErrors = new HashMap<>();
private final Map<String, FMeasure> tagFMeasure = new HashMap<>();
// represents a Confusion Matrix that aggregates all tokens
private final Map<String, ConfusionMatrixLine> generalConfusionMatrix = new HashMap<>();
// represents a set of Confusion Matrix for each token
private final Map<String, Map<String, ConfusionMatrixLine>> tokenConfusionMatrix = new HashMap<>();
private int minimalSentenceLength = Integer.MAX_VALUE;
private int maximumSentenceLength = Integer.MIN_VALUE;
/**
 * Adds one fully tagged sentence to the statistics: updates the
 * sentence-length aggregates, the per-tag F-measure and the per-token
 * counts. Assumes the three arrays are index-aligned.
 */
public void add(String[] toks, String[] refs, String[] preds) {
  int sentenceLength = toks.length;
  averageSentenceLength.add(sentenceLength);
  minimalSentenceLength = Math.min(minimalSentenceLength, sentenceLength);
  maximumSentenceLength = Math.max(maximumSentenceLength, sentenceLength);
  updateTagFMeasure(refs, preds);
  for (int i = 0; i < toks.length; i++) {
    commit(toks[i], refs[i], preds[i]);
  }
}
/**
 * Adds a single classification outcome for an input of the given length,
 * committing it under an empty token key.
 */
public void add(int length, String ref, String pred) {
  averageSentenceLength.add(length);
  minimalSentenceLength = Math.min(minimalSentenceLength, length);
  maximumSentenceLength = Math.max(maximumSentenceLength, length);
  updateTagFMeasure(new String[] { ref }, new String[] { pred });
  commit("", ref, pred);
}
/** Convenience overload: uses the token count of {@code text} as the length. */
public void add(String[] text, String ref, String pred) {
  int length = text.length;
  this.add(length, ref, pred);
}

/** Convenience overload: uses the character count of {@code text} as the length. */
public void add(CharSequence text, String ref, String pred) {
  int length = text.length();
  this.add(length, ref, pred);
}
/**
 * Includes a new evaluation data
 *
 * @param tok
 *          the evaluated token
 * @param ref
 *          the reference pos tag
 * @param pred
 *          the predicted pos tag
 */
private void commit(String tok, String ref, String pred) {
  // token stats: lazily create the three parallel per-token entries
  if (!tokAccuracies.containsKey(tok)) {
    tokAccuracies.put(tok, new Mean());
    tokOcurrencies.put(tok, new Counter());
    tokErrors.put(tok, new Counter());
  }
  tokOcurrencies.get(tok).increment();
  // tag stats: lazily create the per-reference-tag entries
  if (!tagOcurrencies.containsKey(ref)) {
    tagOcurrencies.put(ref, new Counter());
    tagErrors.put(ref, new Counter());
  }
  tagOcurrencies.get(ref).increment();
  // updates general, token and tag error stats
  if (ref.equals(pred)) {
    tokAccuracies.get(tok).add(1);
    accuracy.add(1);
  } else {
    tokAccuracies.get(tok).add(0);
    tokErrors.get(tok).increment();
    tagErrors.get(ref).increment();
    accuracy.add(0);
  }
  // populate confusion matrixes: the aggregate one and the per-token one
  if (!generalConfusionMatrix.containsKey(ref)) {
    generalConfusionMatrix.put(ref, new ConfusionMatrixLine(ref));
  }
  generalConfusionMatrix.get(ref).increment(pred);
  if (!tokenConfusionMatrix.containsKey(tok)) {
    tokenConfusionMatrix.put(tok, new HashMap<>());
  }
  if (!tokenConfusionMatrix.get(tok).containsKey(ref)) {
    tokenConfusionMatrix.get(tok).put(ref, new ConfusionMatrixLine(ref));
  }
  tokenConfusionMatrix.get(tok).get(ref).increment(pred);
}
/**
 * Updates the per-tag {@link FMeasure} accumulators: for every tag that
 * occurs in either array, the positions holding that tag are turned into
 * one-token {@link Span}s and scored reference-vs-prediction.
 */
private void updateTagFMeasure(String[] refs, String[] preds) {
  // create a set with all tags
  Set<String> tags = new HashSet<>(Arrays.asList(refs));
  tags.addAll(Arrays.asList(preds));
  // create samples for each tag
  for (String tag : tags) {
    List<Span> reference = new ArrayList<>();
    List<Span> prediction = new ArrayList<>();
    for (int i = 0; i < refs.length; i++) {
      if (refs[i].equals(tag)) {
        reference.add(new Span(i, i + 1));
      }
      if (preds[i].equals(tag)) {
        prediction.add(new Span(i, i + 1));
      }
    }
    if (!this.tagFMeasure.containsKey(tag)) {
      this.tagFMeasure.put(tag, new FMeasure());
    }
    // populate the fmeasure
    this.tagFMeasure.get(tag).updateScores(
        reference.toArray(new Span[reference.size()]),
        prediction.toArray(new Span[prediction.size()]));
  }
}
private double getAccuracy() {
return accuracy.mean();
}
private int getNumberOfTags() {
return this.tagOcurrencies.keySet().size();
}
private long getNumberOfSentences() {
return this.averageSentenceLength.count();
}
private double getAverageSentenceSize() {
return this.averageSentenceLength.mean();
}
private int getMinSentenceSize() {
return this.minimalSentenceLength;
}
private int getMaxSentenceSize() {
return this.maximumSentenceLength;
}
private double getTokenAccuracy(String token) {
return tokAccuracies.get(token).mean();
}
private int getTokenErrors(String token) {
return tokErrors.get(token).value();
}
private int getTokenFrequency(String token) {
return tokOcurrencies.get(token).value();
}
private SortedSet<String> getTokensOrderedByFrequency() {
SortedSet<String> toks = new TreeSet<>(new SimpleLabelComparator(tokOcurrencies));
toks.addAll(tokOcurrencies.keySet());
return Collections.unmodifiableSortedSet(toks);
}
private SortedSet<String> getTokensOrderedByNumberOfErrors() {
SortedSet<String> toks = new TreeSet<>(new SimpleLabelComparator(tokErrors));
toks.addAll(tokErrors.keySet());
return toks;
}
private int getTagFrequency(String tag) {
return tagOcurrencies.get(tag).value();
}
    /**
     * Returns the number of tagging errors recorded for a tag.
     *
     * @param tag a tag seen during evaluation (unseen tags cause a
     *            NullPointerException)
     * @return the error count recorded for {@code tag}
     */
    private int getTagErrors(String tag) {
      return tagErrors.get(tag).value();
    }
    /**
     * @param tag a tag seen during evaluation
     * @return the F-measure accumulated for {@code tag}
     */
    private double getTagFMeasure(String tag) {
      return tagFMeasure.get(tag).getFMeasure();
    }
    /**
     * @param tag a tag seen during evaluation
     * @return the recall accumulated for {@code tag}
     */
    private double getTagRecall(String tag) {
      return tagFMeasure.get(tag).getRecallScore();
    }
    /**
     * @param tag a tag seen during evaluation
     * @return the precision accumulated for {@code tag}
     */
    private double getTagPrecision(String tag) {
      return tagFMeasure.get(tag).getPrecisionScore();
    }
    /**
     * Returns all tags, ordered by the comparator obtained from
     * {@code getLabelComparator} over the per-tag error counts.
     *
     * @return an unmodifiable sorted set of tags ordered by error count
     */
    private SortedSet<String> getTagsOrderedByErrors() {
      SortedSet<String> tags = new TreeSet<>(getLabelComparator(tagErrors));
      tags.addAll(tagErrors.keySet());
      return Collections.unmodifiableSortedSet(tags);
    }
    /**
     * @return the tagset covered by the general (corpus-wide) confusion matrix
     */
    private SortedSet<String> getConfusionMatrixTagset() {
      return getConfusionMatrixTagset(generalConfusionMatrix);
    }
    /**
     * @return the general (corpus-wide) confusion matrix as counts plus a
     *         trailing accuracy column; see {@code createConfusionMatrix}
     */
    private double[][] getConfusionMatrix() {
      return createConfusionMatrix(getConfusionMatrixTagset(),
          generalConfusionMatrix);
    }
    /**
     * @param token a token that occurred during evaluation (unseen tokens
     *              cause a NullPointerException)
     * @return the tagset covered by the confusion matrix of {@code token}
     */
    private SortedSet<String> getConfusionMatrixTagset(String token) {
      return getConfusionMatrixTagset(tokenConfusionMatrix.get(token));
    }
    /**
     * @param token a token that occurred during evaluation (unseen tokens
     *              cause a NullPointerException)
     * @return the confusion matrix of {@code token} as counts plus a trailing
     *         accuracy column; see {@code createConfusionMatrix}
     */
    private double[][] getConfusionMatrix(String token) {
      return createConfusionMatrix(getConfusionMatrixTagset(token),
          tokenConfusionMatrix.get(token));
    }
/**
* Creates a matrix with N lines and N + 1 columns with the data from
* confusion matrix. The last column is the accuracy.
*/
private double[][] createConfusionMatrix(SortedSet<String> tagset,
Map<String, ConfusionMatrixLine> data) {
int size = tagset.size();
double[][] matrix = new double[size][size + 1];
int line = 0;
for (String ref : tagset) {
int column = 0;
for (String pred : tagset) {
matrix[line][column] = data.get(ref) != null ? data
.get(ref).getValue(pred) : 0;
column++;
}
// set accuracy
matrix[line][column] = data.get(ref) != null ? data.get(ref).getAccuracy() : 0;
line++;
}
return matrix;
}
    /**
     * Collects the tagset covered by a confusion matrix: all reference tags
     * (the map keys) plus every predicted tag that appears on any line.
     *
     * @param data confusion-matrix lines keyed by reference tag
     * @return an unmodifiable sorted set of all tags involved
     */
    private SortedSet<String> getConfusionMatrixTagset(
        Map<String, ConfusionMatrixLine> data) {
      SortedSet<String> tags = new TreeSet<>(getMatrixLabelComparator(data));
      tags.addAll(data.keySet());
      List<String> col = new LinkedList<>();
      for (String t : tags) {
        // also pick up predicted tags that never occur as reference tags,
        // so the matrix does not lose their columns
        col.addAll(data.get(t).line.keySet());
      }
      tags.addAll(col);
      return Collections.unmodifiableSortedSet(tags);
    }
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/GenerateManualTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.io.StringReader;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.StringTokenizer;
import opennlp.tools.cmdline.ArgumentParser.Argument;
/**
 * Generates the DocBook XML chapter that documents the OpenNLP command line
 * interface, by iterating over all tools registered in {@link CLI}.
 */
public class GenerateManualTool {

  private static final int MAX_LINE_LENGTH = 110; // optimized for printing

  /**
   * Builds the CLI chapter and writes it to the file named by the single
   * command line argument.
   *
   * @param args exactly one element: the path of the output XML file
   * @throws FileNotFoundException if the output file cannot be created
   */
  public static void main(String[] args) throws FileNotFoundException {
    if (args.length != 1) {
      System.out.print(getUsage());
      System.exit(0);
    }
    StringBuilder sb = new StringBuilder();
    appendHeader(sb);
    // organize by package name
    LinkedHashMap<String, Map<String, CmdLineTool>> packageNameToolMap = new LinkedHashMap<>();
    for (Entry<String, CmdLineTool> entry : CLI.getToolLookupMap().entrySet()) {
      final String toolName = entry.getKey();
      final CmdLineTool tool = entry.getValue();
      // group key: last segment of the tool's package name
      String packageName = tool.getClass().getPackage().getName();
      packageName = packageName.substring(packageName.lastIndexOf(".") + 1);
      if (!packageNameToolMap.containsKey(packageName)) {
        packageNameToolMap.put(packageName,
            new LinkedHashMap<String, CmdLineTool>());
      }
      packageNameToolMap.get(packageName).put(toolName, tool);
    }
    // add tools grouped by package
    for (Entry<String, Map<String, CmdLineTool>> entry : packageNameToolMap.entrySet()) {
      appendToolGroup(entry.getKey(), entry.getValue(), sb);
    }
    // footer
    appendFooter(sb);
    // output to file
    try (PrintWriter out = new PrintWriter(args[0])) {
      out.println(sb);
    }
  }

  /**
   * @return this tool usage
   */
  private static String getUsage() {
    return "Requires one argument: \n" +
        "  Path to the output XML file \n";
  }

  /**
   * Appends a group of tools, based on the tool package name
   *
   * @param groupName the group (package) name used for the section id/title
   * @param toolsMap the tools of this group, keyed by tool name
   * @param sb the output buffer
   */
  private static void appendToolGroup(String groupName,
      Map<String, CmdLineTool> toolsMap, StringBuilder sb) {
    sb.append("<section id='tools.cli.").append(groupName).append("'>\n\n");
    sb.append("<title>").append(firstCaps(groupName)).append("</title>\n\n");
    for (Entry<String, CmdLineTool> entry : toolsMap.entrySet()) {
      appendTool(groupName, entry.getKey(), entry.getValue(), sb);
    }
    sb.append("</section>\n\n");
  }

  /**
   * Appends a tool: its description, its help text and, for typed tools,
   * the table of supported formats and arguments.
   *
   * @param groupName the enclosing group name (used in the section id)
   * @param toolName the tool name
   * @param tool the tool instance
   * @param sb the output buffer
   */
  private static void appendTool(String groupName, String toolName,
      CmdLineTool tool, StringBuilder sb) {
    sb.append("<section id='tools.cli.").append(groupName).append(".")
        .append(toolName).append("'>\n\n");
    sb.append("<title>").append(toolName).append("</title>\n\n");
    sb.append("<para>").append(firstCaps(tool.getShortDescription()))
        .append("</para>\n\n");
    appendCode(tool.getHelp(), sb);
    if (TypedCmdLineTool.class.isAssignableFrom(tool.getClass())) {
      appendHelpForTool((TypedCmdLineTool<?>) tool, sb);
    }
    sb.append("</section>\n\n");
  }

  /**
   * Appends the per-format argument documentation of a typed tool.
   * The default OpenNLP format is skipped because it has no extra arguments.
   */
  private static void appendHelpForTool(TypedCmdLineTool<?> tool,
      StringBuilder sb) {
    Class<?> type = tool.type;
    Set<String> formats = StreamFactoryRegistry.getFactories(type).keySet();
    sb.append("<para>The supported formats and arguments are:</para>\n\n");
    Map<String, List<Argument>> formatArguments = new LinkedHashMap<>();
    for (String formatName : formats) {
      if (!StreamFactoryRegistry.DEFAULT_FORMAT.equals(formatName)) {
        ObjectStreamFactory<?> format = tool.getStreamFactory(formatName);
        formatArguments.put(formatName,
            ArgumentParser.createArguments(format.getParameters()));
      }
    }
    appendArgumentTable(formatArguments, sb);
  }

  /**
   * Appends a DocBook informaltable listing, per format, the supported
   * arguments with value, optionality and description.
   */
  private static void appendArgumentTable(
      Map<String, List<Argument>> formatArguments, StringBuilder sb) {
    // FIX: the header row below defines FIVE columns (Format, Argument,
    // Value, Optional, Description); the original declared cols='4', which
    // is invalid DocBook and breaks table rendering.
    sb.append(
        "<informaltable frame='all'><tgroup cols='5' align='left' colsep='1' rowsep='1'>\n");
    sb.append(
        "<thead><row><entry>Format</entry><entry>Argument</entry><entry>Value</entry>" +
            "<entry>Optional</entry><entry>Description</entry></row></thead>\n");
    sb.append("<tbody>\n");
    for (Entry<String, List<Argument>> entry : formatArguments.entrySet()) {
      final String format = entry.getKey();
      final List<Argument> arguments = entry.getValue();
      int i = 0;
      for (Argument argument : arguments) {
        sb.append("<row>\n");
        if (i == 0) {
          // first row of the format: span the format cell over all its rows
          sb.append("<entry morerows='").append(arguments.size() - 1)
              .append("' valign='middle'>").append(format).append("</entry>\n");
        }
        sb.append("<entry>").append(argument.getArgument())
            .append("</entry>\n");
        sb.append("<entry>").append(argument.getValue()).append("</entry>\n");
        sb.append("<entry>").append(yes(argument.getOptional()))
            .append("</entry>\n");
        sb.append("<entry>").append(firstCaps(argument.getDescription()))
            .append("</entry>\n");
        sb.append("</row>\n");
        i++;
      }
    }
    sb.append("</tbody>\n");
    sb.append("</tgroup></informaltable>\n\n");
  }

  /**
   * Appends the XML declaration, license header, autogeneration warning and
   * chapter opening of the document.
   */
  private static void appendHeader(StringBuilder sb) {
    sb.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
        + "<!DOCTYPE chapter PUBLIC \"-//OASIS//DTD DocBook XML V4.4//EN\"\n"
        + "\"http://www.oasis-open.org/docbook/xml/4.4/docbookx.dtd\"[\n"
        + "]>\n" + "<!--\n"
        + "Licensed to the Apache Software Foundation (ASF) under one\n"
        + "or more contributor license agreements.  See the NOTICE file\n"
        + "distributed with this work for additional information\n"
        + "regarding copyright ownership.  The ASF licenses this file\n"
        + "to you under the Apache License, Version 2.0 (the\n"
        + "\"License\"); you may not use this file except in compliance\n"
        + "with the License.  You may obtain a copy of the License at\n" + "\n"
        + "   http://www.apache.org/licenses/LICENSE-2.0\n" + "\n"
        + "Unless required by applicable law or agreed to in writing,\n"
        + "software distributed under the License is distributed on an\n"
        + "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n"
        + "KIND, either express or implied.  See the License for the\n"
        + "specific language governing permissions and limitations\n"
        + "under the License.\n" + "-->\n" + "\n\n"
        + "<!-- ## Warning ## this content is autogenerated! Please fix issues in to " +
        "opennlp-tools/src/main/java/opennlp/tools/cmdline/GenerateManualTool.java \n"
        + " and execute the following command in opennlp-tool folder to update this file: \n\n"
        + " mvn -e -q exec:java \"-Dexec.mainClass=opennlp.tools.cmdline.GenerateManualTool\" "
        + "\"-Dexec.args=../opennlp-docs/src/docbkx/cli.xml\"\n"
        + "-->\n\n" + "<chapter id='tools.cli'>\n\n"
        + "<title>The Command Line Interface</title>\n\n" + "<para>"
        + "This section details the available tools and parameters of the Command Line Interface. "
        + "For a introduction in its usage please refer to <xref linkend='intro.cli'/>. "
        + "</para>\n\n");
  }

  /** Appends the chapter closing tag. */
  private static void appendFooter(StringBuilder sb) {
    sb.append("\n\n</chapter>");
  }

  /**
   * Upper-cases the first character of {@code str}; strings shorter than
   * two characters are returned unchanged.
   */
  private static String firstCaps(String str) {
    if (str.length() > 1) {
      return str.substring(0, 1).toUpperCase() + str.substring(1);
    } else {
      return str;
    }
  }

  /** @return "Yes" if {@code optional} is true, otherwise "No" */
  private static String yes(boolean optional) {
    if (optional) {
      return "Yes";
    }
    return "No";
  }

  /** Wraps {@code help} in a CDATA screen element, with long lines split. */
  private static void appendCode(String help, StringBuilder sb) {
    sb.append("<screen>\n" + "<![CDATA[\n").append(splitLongLines(help))
        .append("\n").append("]]>\n").append("</screen> \n");
  }

  /**
   * Prevents long lines. Lines are optimized for printing.
   *
   * @param stringBlock the text to re-wrap
   * @return the text with lines longer than {@link #MAX_LINE_LENGTH}
   *         wrapped at word boundaries and continued with an 8-space indent
   */
  private static String splitLongLines(String stringBlock) {
    StringBuilder sb = new StringBuilder();
    String line;
    try {
      BufferedReader reader = new BufferedReader(new StringReader(stringBlock));
      while ((line = reader.readLine()) != null) {
        if (line.length() <= MAX_LINE_LENGTH) {
          sb.append(line).append("\n");
        } else {
          StringTokenizer tok = new StringTokenizer(line, " ");
          int lineLen = 0;
          while (tok.hasMoreTokens()) {
            String word = tok.nextToken() + " ";
            if (lineLen + word.length() > MAX_LINE_LENGTH) {
              sb.append("\n        ");
              lineLen = 8; // continuation indent width
            }
            sb.append(word);
            lineLen += word.length();
          }
        }
      }
    } catch (Exception e) {
      // best effort: reading from an in-memory string cannot realistically
      // fail, so any exception is deliberately ignored
    }
    return sb.toString();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/ModelLoader.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.Objects;
import opennlp.tools.util.InvalidFormatException;
/**
 * Loads a model and does all the error handling for the command line tools.
 * <p>
 * <b>Note:</b> Do not use this class, internal use only!
 *
 * @param <T> the model type produced by {@link #loadModel(InputStream)}
 */
public abstract class ModelLoader<T> {

  private final String modelName;

  protected ModelLoader(String modelName) {
    this.modelName = Objects.requireNonNull(modelName, "modelName must not be null!");
  }

  /**
   * Deserializes the concrete model type from the given stream.
   *
   * @param modelIn the open model stream; closed by the caller
   * @return the loaded model
   * @throws IOException if reading or parsing the model fails
   */
  protected abstract T loadModel(InputStream modelIn) throws IOException;

  /**
   * Loads the model from {@code modelFile}, printing progress and timing to
   * {@code System.err} and translating failures into
   * {@link TerminateToolException}s for the CLI layer.
   *
   * @param modelFile the model file to read
   * @return the loaded model, never {@code null}
   */
  public T load(File modelFile) {
    final long loadStart = System.currentTimeMillis();

    CmdLineUtil.checkInputFile(modelName + " model", modelFile);
    System.err.print("Loading " + modelName + " model ... ");

    final T model;
    try (InputStream modelIn = new BufferedInputStream(
        CmdLineUtil.openInFile(modelFile), CmdLineUtil.IO_BUFFER_SIZE)) {
      model = loadModel(modelIn);
    }
    // InvalidFormatException extends IOException, so it must be caught first
    catch (InvalidFormatException e) {
      System.err.println("failed");
      throw new TerminateToolException(-1, "Model has invalid format", e);
    }
    catch (IOException e) {
      System.err.println("failed");
      throw new TerminateToolException(-1, "IO error while loading model file '" + modelFile + "'", e);
    }

    System.err.printf("done (%.3fs)\n",
        (System.currentTimeMillis() - loadStart) / 1000d);
    return model;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/ObjectStreamFactory.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline;
import opennlp.tools.util.ObjectStream;
public interface ObjectStreamFactory<T> {
  /**
   * Returns the interface describing this factory's parameters, for use with
   * {@code ArgumentParser}.
   * <p>
   * NOTE(review): the method-level type parameter {@code <P>} is inferred at
   * the call site rather than tied to a concrete class here, which makes the
   * call inherently unchecked — presumably kept for API compatibility.
   *
   * @param <P> interface which describes the parameters
   *
   * @return interface with parameters description
   */
  <P> Class<P> getParameters();
  /**
   * Creates the <code>ObjectStream</code> from command line style arguments.
   *
   * @param args arguments, typically matching the interface returned by
   *             {@link #getParameters()}
   * @return ObjectStream instance
   */
  ObjectStream<T> create(String[] args);
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/PerformanceMonitor.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline;
import java.io.PrintStream;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
/**
 * The {@link PerformanceMonitor} measures increments to a counter.
 * During the computation it prints out current and average throughput
 * per second. After the computation is done it prints a final performance
 * report.
 * <p>
 * <b>Note:</b>
 * This class is not thread safe. <br>
 * Do not use this class, internal use only!
 */
public class PerformanceMonitor {

  // single-threaded scheduler driving the once-per-second printout
  private final ScheduledExecutorService scheduler =
      Executors.newScheduledThreadPool(1);

  // label of the counted items, e.g. "sent"
  private final String unit;

  // destination of all progress output
  private final PrintStream out;

  // handle of the periodic printing task, if one was scheduled
  private ScheduledFuture<?> beeperHandle;

  // start timestamp in millis; -1 means "not started"
  private volatile long startTime = -1;

  // items processed since start()
  private volatile int counter;

  public PerformanceMonitor(PrintStream out, String unit) {
    this.out = out;
    this.unit = unit;
  }

  public PerformanceMonitor(String unit) {
    this(System.out, unit);
  }

  /** @return {@code true} once {@link #start()} has been called */
  public boolean isStarted() {
    return startTime != -1;
  }

  /**
   * Adds {@code increment} items to the counter.
   *
   * @param increment non-negative number of newly processed items
   * @throws IllegalStateException if the monitor has not been started
   * @throws IllegalArgumentException if {@code increment} is negative
   */
  public void incrementCounter(int increment) {
    if (!isStarted()) {
      throw new IllegalStateException("Must be started first!");
    }
    if (increment < 0) {
      throw new IllegalArgumentException("increment must be zero or positive but was " + increment + "!");
    }
    counter += increment;
  }

  /** Adds a single item to the counter. */
  public void incrementCounter() {
    incrementCounter(1);
  }

  /**
   * Records the start time.
   *
   * @throws IllegalStateException if the monitor was already started
   */
  public void start() {
    if (isStarted()) {
      throw new IllegalStateException("Already started!");
    }
    startTime = System.currentTimeMillis();
  }

  /**
   * Starts the monitor and schedules a task that prints the current and
   * average throughput once per second.
   */
  public void startAndPrintThroughput() {
    start();

    final Runnable beeper = new Runnable() {

      // state of the previous tick, used to compute the current throughput
      private long lastTimeStamp = startTime;
      private int lastCount = counter;

      public void run() {
        int deltaCount = counter - lastCount;
        long sinceLastRun = System.currentTimeMillis() - lastTimeStamp;
        double currentThroughput = sinceLastRun > 0
            ? deltaCount / ((double) sinceLastRun / 1000)
            : 0;

        long totalTimePassed = System.currentTimeMillis() - startTime;
        double averageThroughput = totalTimePassed > 0
            ? counter / (((double) totalTimePassed) / 1000)
            : 0;

        out.printf("current: %.1f " + unit + "/s avg: %.1f " + unit + "/s total: %d "
            + unit + "%n", currentThroughput, averageThroughput, counter);

        lastTimeStamp = System.currentTimeMillis();
        lastCount = counter;
      }
    };

    beeperHandle = scheduler.scheduleAtFixedRate(beeper, 1, 1, TimeUnit.SECONDS);
  }

  /**
   * Cancels the periodic printout (letting an in-progress one finish),
   * shuts the scheduler down and prints the final report.
   *
   * @throws IllegalStateException if the monitor has not been started
   */
  public void stopAndPrintFinalResult() {
    if (!isStarted()) {
      throw new IllegalStateException("Must be started first!");
    }

    if (beeperHandle != null) {
      // yeah we have time to finish current printing if there is one
      beeperHandle.cancel(false);
    }
    scheduler.shutdown();

    long timePassed = System.currentTimeMillis() - startTime;
    double average = timePassed > 0 ? counter / (timePassed / 1000d) : 0;

    out.println();
    out.println();
    out.printf("Average: %.1f " + unit + "/s %n", average);
    out.println("Total: " + counter + " " + unit);
    out.println("Runtime: " + timePassed / 1000d + "s");
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/StreamFactoryRegistry.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline;
import java.util.HashMap;
import java.util.Map;
import opennlp.tools.formats.BioNLP2004NameSampleStreamFactory;
import opennlp.tools.formats.ChunkerSampleStreamFactory;
import opennlp.tools.formats.Conll02NameSampleStreamFactory;
import opennlp.tools.formats.Conll03NameSampleStreamFactory;
import opennlp.tools.formats.ConllXPOSSampleStreamFactory;
import opennlp.tools.formats.ConllXSentenceSampleStreamFactory;
import opennlp.tools.formats.ConllXTokenSampleStreamFactory;
import opennlp.tools.formats.DocumentSampleStreamFactory;
import opennlp.tools.formats.EvalitaNameSampleStreamFactory;
import opennlp.tools.formats.LanguageDetectorSampleStreamFactory;
import opennlp.tools.formats.LeipzigDocumentSampleStreamFactory;
import opennlp.tools.formats.LemmatizerSampleStreamFactory;
import opennlp.tools.formats.NameSampleDataStreamFactory;
import opennlp.tools.formats.ParseSampleStreamFactory;
import opennlp.tools.formats.SentenceSampleStreamFactory;
import opennlp.tools.formats.TokenSampleStreamFactory;
import opennlp.tools.formats.WordTagSampleStreamFactory;
import opennlp.tools.formats.ad.ADChunkSampleStreamFactory;
import opennlp.tools.formats.ad.ADNameSampleStreamFactory;
import opennlp.tools.formats.ad.ADPOSSampleStreamFactory;
import opennlp.tools.formats.ad.ADSentenceSampleStreamFactory;
import opennlp.tools.formats.ad.ADTokenSampleStreamFactory;
import opennlp.tools.formats.brat.BratNameSampleStreamFactory;
import opennlp.tools.formats.conllu.ConlluLemmaSampleStreamFactory;
import opennlp.tools.formats.conllu.ConlluPOSSampleStreamFactory;
import opennlp.tools.formats.conllu.ConlluSentenceSampleStreamFactory;
import opennlp.tools.formats.conllu.ConlluTokenSampleStreamFactory;
import opennlp.tools.formats.convert.NameToSentenceSampleStreamFactory;
import opennlp.tools.formats.convert.NameToTokenSampleStreamFactory;
import opennlp.tools.formats.convert.POSToSentenceSampleStreamFactory;
import opennlp.tools.formats.convert.POSToTokenSampleStreamFactory;
import opennlp.tools.formats.convert.ParseToPOSSampleStreamFactory;
import opennlp.tools.formats.convert.ParseToSentenceSampleStreamFactory;
import opennlp.tools.formats.convert.ParseToTokenSampleStreamFactory;
import opennlp.tools.formats.frenchtreebank.ConstitParseSampleStreamFactory;
import opennlp.tools.formats.irishsentencebank.IrishSentenceBankSentenceStreamFactory;
import opennlp.tools.formats.irishsentencebank.IrishSentenceBankTokenSampleStreamFactory;
import opennlp.tools.formats.leipzig.LeipzigLanguageSampleStreamFactory;
import opennlp.tools.formats.letsmt.LetsmtSentenceStreamFactory;
import opennlp.tools.formats.moses.MosesSentenceSampleStreamFactory;
import opennlp.tools.formats.muc.Muc6NameSampleStreamFactory;
import opennlp.tools.formats.ontonotes.OntoNotesNameSampleStreamFactory;
import opennlp.tools.formats.ontonotes.OntoNotesPOSSampleStreamFactory;
import opennlp.tools.formats.ontonotes.OntoNotesParseSampleStreamFactory;
/**
 * Registry for object stream factories.
 * <p>
 * Maps a sample class to the set of named data formats (and their factories)
 * that can produce streams of that sample type.
 */
public final class StreamFactoryRegistry {

  // sample class -> (format name -> factory); raw types kept for
  // backward compatibility of the public registerFactory signature
  private static final Map<Class, Map<String, ObjectStreamFactory>> registry = new HashMap<>();

  static {
    ChunkerSampleStreamFactory.registerFactory();
    DocumentSampleStreamFactory.registerFactory();
    NameSampleDataStreamFactory.registerFactory();
    ParseSampleStreamFactory.registerFactory();
    SentenceSampleStreamFactory.registerFactory();
    TokenSampleStreamFactory.registerFactory();
    WordTagSampleStreamFactory.registerFactory();
    LemmatizerSampleStreamFactory.registerFactory();
    LanguageDetectorSampleStreamFactory.registerFactory();
    NameToSentenceSampleStreamFactory.registerFactory();
    NameToTokenSampleStreamFactory.registerFactory();
    POSToSentenceSampleStreamFactory.registerFactory();
    POSToTokenSampleStreamFactory.registerFactory();
    ParseToPOSSampleStreamFactory.registerFactory();
    ParseToSentenceSampleStreamFactory.registerFactory();
    ParseToTokenSampleStreamFactory.registerFactory();
    OntoNotesNameSampleStreamFactory.registerFactory();
    OntoNotesParseSampleStreamFactory.registerFactory();
    OntoNotesPOSSampleStreamFactory.registerFactory();
    BioNLP2004NameSampleStreamFactory.registerFactory();
    Conll02NameSampleStreamFactory.registerFactory();
    Conll03NameSampleStreamFactory.registerFactory();
    EvalitaNameSampleStreamFactory.registerFactory();
    ConllXPOSSampleStreamFactory.registerFactory();
    ConllXSentenceSampleStreamFactory.registerFactory();
    ConllXTokenSampleStreamFactory.registerFactory();
    LeipzigDocumentSampleStreamFactory.registerFactory();
    ADChunkSampleStreamFactory.registerFactory();
    ADNameSampleStreamFactory.registerFactory();
    ADSentenceSampleStreamFactory.registerFactory();
    ADPOSSampleStreamFactory.registerFactory();
    ADTokenSampleStreamFactory.registerFactory();
    Muc6NameSampleStreamFactory.registerFactory();
    ConstitParseSampleStreamFactory.registerFactory();
    BratNameSampleStreamFactory.registerFactory();
    LetsmtSentenceStreamFactory.registerFactory();
    MosesSentenceSampleStreamFactory.registerFactory();
    ConlluTokenSampleStreamFactory.registerFactory();
    ConlluSentenceSampleStreamFactory.registerFactory();
    ConlluPOSSampleStreamFactory.registerFactory();
    ConlluLemmaSampleStreamFactory.registerFactory();
    IrishSentenceBankSentenceStreamFactory.registerFactory();
    IrishSentenceBankTokenSampleStreamFactory.registerFactory();
    LeipzigLanguageSampleStreamFactory.registerFactory();
  }

  public static final String DEFAULT_FORMAT = "opennlp";

  private StreamFactoryRegistry() {
    // not intended to be instantiated
  }

  /**
   * Registers <code>factory</code> which reads format named <code>formatName</code> and
   * instantiates streams producing objects of <code>sampleClass</code> class.
   *
   * @param sampleClass class of the objects, produced by the streams instantiated by the factory
   * @param formatName  name of the format
   * @param factory     instance of the factory
   * @return true if the factory was successfully registered, false if the
   *         format was already registered for the sample class
   */
  public static boolean registerFactory(Class sampleClass,
                                        String formatName,
                                        ObjectStreamFactory factory) {
    // computeIfAbsent + putIfAbsent replace the original create/containsKey/put
    // dance with single lookups; the registration semantics are unchanged.
    Map<String, ObjectStreamFactory> formats =
        registry.computeIfAbsent(sampleClass, c -> new HashMap<>());
    return formats.putIfAbsent(formatName, factory) == null;
  }

  /**
   * Unregisters a factory which reads format named <code>formatName</code> and
   * instantiates streams producing objects of <code>sampleClass</code> class.
   *
   * @param sampleClass class of the objects, produced by the streams instantiated by the factory
   * @param formatName  name of the format
   */
  public static void unregisterFactory(Class sampleClass, String formatName) {
    Map<String, ObjectStreamFactory> formats = registry.get(sampleClass);
    if (null != formats) {
      // Map.remove is a no-op for absent keys; the containsKey guard was redundant
      formats.remove(formatName);
    }
  }

  /**
   * Returns all factories which produce objects of <code>sampleClass</code> class.
   *
   * @param sampleClass class of the objects, produced by the streams instantiated by the factory
   * @return formats mapped to factories, or {@code null} if none are registered
   */
  @SuppressWarnings("unchecked")
  public static <T> Map<String, ObjectStreamFactory<T>> getFactories(Class<T> sampleClass) {
    return (Map<String, ObjectStreamFactory<T>>) (Object) registry.get(sampleClass);
  }

  /**
   * Returns a factory which reads format named <code>formatName</code> and
   * instantiates streams producing objects of <code>sampleClass</code> class.
   * <p>
   * If no factory is registered, {@code formatName} is also tried as a fully
   * qualified class name to load a factory reflectively.
   *
   * @param sampleClass class of the objects, produced by the streams instantiated by the factory
   * @param formatName  name of the format, if null, assumes OpenNLP format
   * @return factory instance, or {@code null} if none could be found or created
   */
  @SuppressWarnings("unchecked")
  public static <T> ObjectStreamFactory<T> getFactory(Class<T> sampleClass,
                                                      String formatName) {
    if (null == formatName) {
      formatName = DEFAULT_FORMAT;
    }

    // single lookup instead of containsKey followed by get
    Map<String, ObjectStreamFactory> formats = registry.get(sampleClass);
    ObjectStreamFactory<T> factory =
        formats != null ? (ObjectStreamFactory<T>) formats.get(formatName) : null;

    if (factory != null) {
      return factory;
    }
    else {
      try {
        Class<?> factoryClazz = Class.forName(formatName);

        // TODO: Need to check if it can produce the desired output
        // Otherwise there will be class cast exceptions later in the flow
        try {
          return (ObjectStreamFactory<T>) factoryClazz.newInstance();
        } catch (InstantiationException | IllegalAccessException e) {
          return null;
        }
      } catch (ClassNotFoundException e) {
        return null;
      }
    }
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/SystemInputStreamFactory.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import opennlp.tools.util.InputStreamFactory;
public class SystemInputStreamFactory implements InputStreamFactory {

  // true once System.in has been handed out; it cannot be re-read
  private boolean isTainted = false;

  /**
   * @return the JVM default charset, which is what {@code System.in} uses
   */
  public static Charset encoding() {
    return Charset.defaultCharset();
  }

  /**
   * Returns {@code System.in} exactly once. A console stream cannot be
   * rewound, so every subsequent call fails.
   *
   * @throws UnsupportedOperationException on every call after the first
   */
  @Override
  public InputStream createInputStream() throws IOException {
    if (isTainted) {
      throw new UnsupportedOperationException(
          "The System.in stream can't be re-created to read from the beginning!");
    }
    isTainted = true;
    return System.in;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/TerminateToolException.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline;
/**
 * Exception to terminate the execution of a command line tool.
 * <p>
 * The exception should be thrown to indicate that the VM should be terminated with
 * the specified error code, instead of just calling {@link System#exit(int)}.
 * <p>
 * The return code convention is to return:<br>
 * 0 in case of graceful termination<br>
 * -1 in case of runtime errors, such as IOException<br>
 * 1 in case of invalid parameters.
 * <p>
 * <b>Note:</b> Do not use this class, internal use only!
 */
@SuppressWarnings("serial")
public class TerminateToolException extends RuntimeException {

  /** Process exit code the CLI driver should terminate with. */
  private final int code;

  /** Message shadowing RuntimeException's; may be null. */
  private final String message;

  /**
   * @param code    the exit code
   * @param message the error message, may be null
   * @param t       the cause of the termination
   */
  public TerminateToolException(int code, String message, Throwable t) {
    super(t);
    this.code = code;
    this.message = message;
  }

  /**
   * @param code    the exit code
   * @param message the error message, may be null
   */
  public TerminateToolException(int code, String message) {
    // NOTE: intentionally does not chain to the Throwable constructor —
    // super((Throwable) null) would pin the cause and break initCause().
    this.code = code;
    this.message = message;
  }

  /**
   * @param code the exit code
   */
  public TerminateToolException(int code) {
    this(code, null);
  }

  /** @return the exit code the VM should terminate with */
  public int getCode() {
    return code;
  }

  @Override
  public String getMessage() {
    return message;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/TypedCmdLineTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline;
import java.util.Map;
/**
 * Base class for command line tools that process samples of some type {@code T}
 * read from a stream in one of several registered data formats.
 */
public abstract class TypedCmdLineTool<T>
    extends CmdLineTool {

  /** Class object giving run-time access to the generic sample type {@code T}. */
  protected final Class<T> type;

  /**
   * Initializes the tool with the sample type it operates on.
   *
   * @param sampleType class of the template parameter
   */
  protected TypedCmdLineTool(Class<T> sampleType) {
    this.type = sampleType;
  }

  /**
   * Looks up the stream factory registered for this tool's sample type and the
   * given <code>format</code>.
   *
   * @param format data format name
   * @return stream factory for the type of this tool for the format
   */
  protected ObjectStreamFactory<T> getStreamFactory(String format) {
    ObjectStreamFactory<T> factory = StreamFactoryRegistry.getFactory(type, format);
    if (factory == null) {
      // Unknown format name: terminate with the invalid-parameters exit code.
      throw new TerminateToolException(1, "Format " + format + " is not found.\n" + getHelp());
    }
    return factory;
  }

  /**
   * Validates arguments using parameters from <code>argProxyInterface</code> and the
   * parameters of the <code>format</code>.
   *
   * @param args arguments
   * @param argProxyInterface interface with parameter descriptions
   * @param format data format name
   * @param <A> A
   */
  @SuppressWarnings({"unchecked"})
  protected <A> void validateAllArgs(String[] args, Class<A> argProxyInterface, String format) {
    final ObjectStreamFactory<T> factory = getStreamFactory(format);
    final String errMessage = ArgumentParser.validateArgumentsLoudly(args, argProxyInterface,
        factory.<A>getParameters());
    if (errMessage != null) {
      throw new TerminateToolException(1, errMessage + "\n" + getHelp(format));
    }
  }

  /**
   * Validates arguments for a format processed by the <code>factory</code>.
   *
   * @param factory a stream factory
   * @param args arguments
   */
  protected void validateFactoryArgs(ObjectStreamFactory<T> factory, String[] args) {
    final String errMessage = ArgumentParser.validateArgumentsLoudly(args, factory.getParameters());
    if (errMessage != null) {
      throw new TerminateToolException(1, "Format parameters are invalid: " + errMessage + "\n" +
          "Usage: " + ArgumentParser.createUsage(factory.getParameters()));
    }
  }

  @Override
  protected String getBasicHelp(Class<?>... argProxyInterfaces) {
    Map<String, ObjectStreamFactory<T>> factories = StreamFactoryRegistry.getFactories(type);

    // When more formats than the default one are registered, list the
    // alternatives as an optional ".format" suffix in the usage line.
    String formatsHelp = " ";
    if (factories.size() > 1) {
      StringBuilder formats = new StringBuilder();
      for (String format : factories.keySet()) {
        if (!StreamFactoryRegistry.DEFAULT_FORMAT.equals(format)) {
          formats.append(".").append(format).append("|");
        }
      }
      // Trim the trailing '|' separator.
      formatsHelp = "[" + formats.substring(0, formats.length() - 1) + "] ";
    }

    return "Usage: " + CLI.CMD + " " + getName() + formatsHelp +
        ArgumentParser.createUsage(argProxyInterfaces);
  }

  public String getHelp() {
    return getHelp("");
  }

  /**
   * Executes the tool with the given parameters.
   *
   * @param format format to work with
   * @param args command line arguments
   */
  public abstract void run(String format, String[] args);

  /**
   * Retrieves a description on how to use the tool.
   *
   * @param format data format
   * @return a description on how to use the tool
   */
  public abstract String getHelp(String format);
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/chunker/ChunkEvaluationErrorListener.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.chunker;
import java.io.OutputStream;
import opennlp.tools.chunker.ChunkSample;
import opennlp.tools.chunker.ChunkerEvaluationMonitor;
import opennlp.tools.cmdline.EvaluationErrorPrinter;
import opennlp.tools.util.eval.EvaluationMonitor;
/**
 * A default implementation of {@link EvaluationMonitor} that prints
 * misclassified chunk samples to an output stream.
 */
public class ChunkEvaluationErrorListener extends
EvaluationErrorPrinter<ChunkSample> implements ChunkerEvaluationMonitor {
/**
 * Creates a listener that will print to {@link System#err}.
 */
public ChunkEvaluationErrorListener() {
super(System.err);
}
/**
 * Creates a listener that will print to a given {@link OutputStream}.
 */
public ChunkEvaluationErrorListener(OutputStream outputStream) {
super(outputStream);
}
@Override
public void missclassified(ChunkSample reference, ChunkSample prediction) {
// Delegates to the inherited printer: compares the reference and predicted
// phrase spans and prints the mismatch together with the sentence tokens.
printError(reference.getPhrasesAsSpanList(),
prediction.getPhrasesAsSpanList(), reference, prediction,
reference.getSentence());
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/chunker/ChunkerConverterTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.chunker;
import opennlp.tools.chunker.ChunkSample;
import opennlp.tools.cmdline.AbstractConverterTool;
/**
 * Tool to convert multiple data formats into native OpenNLP chunker training
 * format.
 */
public class ChunkerConverterTool extends AbstractConverterTool<ChunkSample> {
// Registers ChunkSample as the handled sample type; all conversion logic
// is inherited from AbstractConverterTool.
public ChunkerConverterTool() {
super(ChunkSample.class);
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/chunker/ChunkerCrossValidatorTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.chunker;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import opennlp.tools.chunker.ChunkSample;
import opennlp.tools.chunker.ChunkerCrossValidator;
import opennlp.tools.chunker.ChunkerEvaluationMonitor;
import opennlp.tools.chunker.ChunkerFactory;
import opennlp.tools.cmdline.AbstractCrossValidatorTool;
import opennlp.tools.cmdline.CmdLineUtil;
import opennlp.tools.cmdline.chunker.ChunkerCrossValidatorTool.CVToolParams;
import opennlp.tools.cmdline.params.CVParams;
import opennlp.tools.cmdline.params.DetailedFMeasureEvaluatorParams;
import opennlp.tools.util.eval.EvaluationMonitor;
import opennlp.tools.util.eval.FMeasure;
import opennlp.tools.util.model.ModelUtil;
// K-fold cross validation tool for the learnable chunker: wires command line
// parameters into a ChunkerCrossValidator and prints the resulting scores.
public final class ChunkerCrossValidatorTool
extends AbstractCrossValidatorTool<ChunkSample, CVToolParams> {
// Chunker training parameters combined with generic cross-validation and
// detailed-FMeasure reporting options.
interface CVToolParams extends TrainingParams, CVParams, DetailedFMeasureEvaluatorParams {
}
public ChunkerCrossValidatorTool() {
super(ChunkSample.class, CVToolParams.class);
}
public String getShortDescription() {
return "K-fold cross validator for the chunker";
}
public void run(String format, String[] args) {
// Parses the arguments and opens sampleStream (done by the superclass).
super.run(format, args);
// Load training parameters; fall back to defaults when none were provided.
mlParams = CmdLineUtil.loadTrainingParameters(params.getParams(), false);
if (mlParams == null) {
mlParams = ModelUtil.createDefaultTrainingParameters();
}
List<EvaluationMonitor<ChunkSample>> listeners = new LinkedList<>();
ChunkerDetailedFMeasureListener detailedFMeasureListener = null;
// When requested, print each misclassified sample during evaluation.
if (params.getMisclassified()) {
listeners.add(new ChunkEvaluationErrorListener());
}
// When requested, collect a detailed per-tag F-measure report instead of
// the plain aggregate summary.
if (params.getDetailedF()) {
detailedFMeasureListener = new ChunkerDetailedFMeasureListener();
listeners.add(detailedFMeasureListener);
}
ChunkerCrossValidator validator;
try {
ChunkerFactory chunkerFactory = ChunkerFactory
.create(params.getFactory());
validator = new ChunkerCrossValidator(params.getLang(), mlParams,
chunkerFactory,
listeners.toArray(new ChunkerEvaluationMonitor[listeners.size()]));
// Trains and evaluates over the requested number of folds.
validator.evaluate(sampleStream, params.getFolds());
}
catch (IOException e) {
// Converts the IO failure into a TerminateToolException (never returns).
throw createTerminationIOException(e);
}
finally {
try {
sampleStream.close();
} catch (IOException e) {
// sorry that this can fail
}
}
// Print either the detailed per-tag report or the aggregate F-measure.
if (detailedFMeasureListener == null) {
FMeasure result = validator.getFMeasure();
System.out.println(result.toString());
} else {
System.out.println(detailedFMeasureListener.toString());
}
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/chunker/ChunkerDetailedFMeasureListener.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.chunker;
import opennlp.tools.chunker.ChunkSample;
import opennlp.tools.chunker.ChunkerEvaluationMonitor;
import opennlp.tools.cmdline.DetailedFMeasureListener;
import opennlp.tools.util.Span;
// Collects a detailed (per-chunk-type) F-measure report during chunker
// evaluation; scoring logic is inherited from DetailedFMeasureListener.
public class ChunkerDetailedFMeasureListener extends
DetailedFMeasureListener<ChunkSample> implements ChunkerEvaluationMonitor {
@Override
protected Span[] asSpanArray(ChunkSample sample) {
// The scored spans are the sample's chunk phrases.
return sample.getPhrasesAsSpanList();
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/chunker/ChunkerEvaluatorTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.chunker;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import opennlp.tools.chunker.ChunkSample;
import opennlp.tools.chunker.ChunkerEvaluationMonitor;
import opennlp.tools.chunker.ChunkerEvaluator;
import opennlp.tools.chunker.ChunkerME;
import opennlp.tools.chunker.ChunkerModel;
import opennlp.tools.cmdline.AbstractEvaluatorTool;
import opennlp.tools.cmdline.PerformanceMonitor;
import opennlp.tools.cmdline.TerminateToolException;
import opennlp.tools.cmdline.chunker.ChunkerEvaluatorTool.EvalToolParams;
import opennlp.tools.cmdline.params.DetailedFMeasureEvaluatorParams;
import opennlp.tools.cmdline.params.EvaluatorParams;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.eval.EvaluationMonitor;
// Evaluates a trained chunker model against reference data and prints either
// an aggregate F-measure or a detailed per-tag report.
public final class ChunkerEvaluatorTool
extends AbstractEvaluatorTool<ChunkSample, EvalToolParams> {
// Generic evaluator options plus the detailed-FMeasure reporting switch.
interface EvalToolParams extends EvaluatorParams, DetailedFMeasureEvaluatorParams {
}
public ChunkerEvaluatorTool() {
super(ChunkSample.class, EvalToolParams.class);
}
public String getShortDescription() {
return "Measures the performance of the Chunker model with the reference data";
}
public void run(String format, String[] args) {
// Parses the arguments and opens sampleStream (done by the superclass).
super.run(format, args);
ChunkerModel model = new ChunkerModelLoader().load(params.getModel());
List<EvaluationMonitor<ChunkSample>> listeners = new LinkedList<>();
ChunkerDetailedFMeasureListener detailedFMeasureListener = null;
// When requested, print each misclassified sample during evaluation.
if (params.getMisclassified()) {
listeners.add(new ChunkEvaluationErrorListener());
}
// When requested, collect a detailed per-tag F-measure report instead of
// the plain aggregate summary.
if (params.getDetailedF()) {
detailedFMeasureListener = new ChunkerDetailedFMeasureListener();
listeners.add(detailedFMeasureListener);
}
ChunkerEvaluator evaluator = new ChunkerEvaluator(new ChunkerME(model),
listeners.toArray(new ChunkerEvaluationMonitor[listeners.size()]));
final PerformanceMonitor monitor = new PerformanceMonitor("sent");
// Wraps the sample stream so throughput can be counted per sample read.
ObjectStream<ChunkSample> measuredSampleStream = new ObjectStream<ChunkSample>() {
public ChunkSample read() throws IOException {
monitor.incrementCounter();
return sampleStream.read();
}
public void reset() throws IOException {
sampleStream.reset();
}
public void close() throws IOException {
sampleStream.close();
}
};
monitor.startAndPrintThroughput();
try {
evaluator.evaluate(measuredSampleStream);
} catch (IOException e) {
System.err.println("failed");
throw new TerminateToolException(-1, "IO error while reading test data: " + e.getMessage(), e);
} finally {
try {
measuredSampleStream.close();
} catch (IOException e) {
// sorry that this can fail
}
}
monitor.stopAndPrintFinalResult();
System.out.println();
// Print either the detailed per-tag report or the aggregate F-measure.
if (detailedFMeasureListener == null) {
System.out.println(evaluator.getFMeasure());
} else {
System.out.println(detailedFMeasureListener.toString());
}
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/chunker/ChunkerMETool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.chunker;
import java.io.File;
import java.io.IOException;
import opennlp.tools.chunker.ChunkSample;
import opennlp.tools.chunker.ChunkerME;
import opennlp.tools.chunker.ChunkerModel;
import opennlp.tools.cmdline.BasicCmdLineTool;
import opennlp.tools.cmdline.CLI;
import opennlp.tools.cmdline.CmdLineUtil;
import opennlp.tools.cmdline.PerformanceMonitor;
import opennlp.tools.cmdline.SystemInputStreamFactory;
import opennlp.tools.postag.POSSample;
import opennlp.tools.util.InvalidFormatException;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.PlainTextByLineStream;
// Command line demonstrator for the learnable chunker: reads POS-tagged
// sentences from stdin and prints the chunked result for each one.
public class ChunkerMETool extends BasicCmdLineTool {
public String getShortDescription() {
return "learnable chunker";
}
public String getHelp() {
return "Usage: " + CLI.CMD + " " + getName() + " model < sentences";
}
public void run(String[] args) {
// Exactly one argument (the model file) is expected; otherwise print usage.
if (args.length != 1) {
System.out.println(getHelp());
} else {
ChunkerModel model = new ChunkerModelLoader().load(new File(args[0]));
ChunkerME chunker = new ChunkerME(model);
ObjectStream<String> lineStream;
PerformanceMonitor perfMon = null;
try {
lineStream = new PlainTextByLineStream(new SystemInputStreamFactory(),
SystemInputStreamFactory.encoding());
perfMon = new PerformanceMonitor(System.err, "sent");
perfMon.start();
String line;
while ((line = lineStream.read()) != null) {
// Each input line must be parseable by POSSample.parse; malformed
// lines are reported to stderr and skipped.
POSSample posSample;
try {
posSample = POSSample.parse(line);
} catch (InvalidFormatException e) {
System.err.println("Invalid format:");
System.err.println(line);
continue;
}
String[] chunks = chunker.chunk(posSample.getSentence(), posSample.getTags());
System.out.println(new ChunkSample(posSample.getSentence(),
posSample.getTags(), chunks).nicePrint());
perfMon.incrementCounter();
}
} catch (IOException e) {
// NOTE(review): this assumes handleStdinIoError always throws and thus
// terminates the tool; if it ever returned normally and perfMon had not
// been assigned yet, the call below would NPE — confirm in CmdLineUtil.
CmdLineUtil.handleStdinIoError(e);
}
perfMon.stopAndPrintFinalResult();
}
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/chunker/ChunkerModelLoader.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.chunker;
import java.io.IOException;
import java.io.InputStream;
import opennlp.tools.chunker.ChunkerModel;
import opennlp.tools.cmdline.ModelLoader;
/**
 * Loads a Chunker Model for the command line tools.
 * <p>
 * <b>Note:</b> Do not use this class, internal use only!
 */
public class ChunkerModelLoader extends ModelLoader<ChunkerModel> {
public ChunkerModelLoader() {
// "Chunker" labels the model type — presumably used by ModelLoader in its
// status/error messages; verify in ModelLoader.
super("Chunker");
}
@Override
protected ChunkerModel loadModel(InputStream modelIn) throws IOException {
// Deserializes a ChunkerModel directly from the open input stream.
return new ChunkerModel(modelIn);
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/chunker/ChunkerTrainerTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.chunker;
import java.io.File;
import java.io.IOException;
import opennlp.tools.chunker.ChunkSample;
import opennlp.tools.chunker.ChunkerFactory;
import opennlp.tools.chunker.ChunkerME;
import opennlp.tools.chunker.ChunkerModel;
import opennlp.tools.cmdline.AbstractTrainerTool;
import opennlp.tools.cmdline.CmdLineUtil;
import opennlp.tools.cmdline.chunker.ChunkerTrainerTool.TrainerToolParams;
import opennlp.tools.cmdline.params.TrainingToolParams;
import opennlp.tools.util.model.ModelUtil;
/**
 * Command line tool that trains a learnable chunker model from a stream of
 * {@link ChunkSample}s and writes the resulting model to disk.
 */
public class ChunkerTrainerTool
    extends AbstractTrainerTool<ChunkSample, TrainerToolParams> {

  /** Chunker-specific training parameters combined with the generic trainer options. */
  interface TrainerToolParams extends TrainingParams, TrainingToolParams {
  }

  public ChunkerTrainerTool() {
    super(ChunkSample.class, TrainerToolParams.class);
  }

  public String getName() {
    return "ChunkerTrainerME";
  }

  public String getShortDescription() {
    return "trainer for the learnable chunker";
  }

  /**
   * Trains a chunker model and writes it to the configured output file.
   *
   * @param format data format name
   * @param args command line arguments
   */
  public void run(String format, String[] args) {
    // Parses the arguments and opens sampleStream (done by the superclass).
    super.run(format, args);

    // Load training parameters; fall back to defaults when none were provided.
    mlParams = CmdLineUtil.loadTrainingParameters(params.getParams(), false);
    if (mlParams == null) {
      mlParams = ModelUtil.createDefaultTrainingParameters();
    }

    File modelOutFile = params.getModel();
    // FIX: the output file was previously described as "sentence detector
    // model" in check-failure messages — a copy-paste error from another
    // trainer tool; this is the chunker trainer.
    CmdLineUtil.checkOutputFile("chunker model", modelOutFile);

    ChunkerModel model;
    try {
      ChunkerFactory chunkerFactory = ChunkerFactory
          .create(params.getFactory());
      model = ChunkerME.train(params.getLang(), sampleStream, mlParams,
          chunkerFactory);
    } catch (IOException e) {
      // Converts the IO failure into a TerminateToolException (never returns).
      throw createTerminationIOException(e);
    }
    finally {
      try {
        sampleStream.close();
      } catch (IOException e) {
        // sorry that this can fail
      }
    }

    CmdLineUtil.writeModel("chunker", modelOutFile, model);
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/chunker/TrainingParams.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.chunker;
import opennlp.tools.cmdline.ArgumentParser.OptionalParameter;
import opennlp.tools.cmdline.ArgumentParser.ParameterDescription;
import opennlp.tools.cmdline.params.BasicTrainingParams;
/**
 * TrainingParams for the Chunker.
 *
 * Note: Do not use this class, internal use only!
 */
interface TrainingParams extends BasicTrainingParams {
// Optional parameter naming a ChunkerFactory subclass that supplies the
// chunker implementation and its resources; may be absent.
@ParameterDescription(valueName = "factoryName",
description = "A sub-class of ChunkerFactory where to get implementation and resources.")
@OptionalParameter
String getFactory();
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/dictionary/DictionaryBuilderParams.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.dictionary;
import java.io.File;
import opennlp.tools.cmdline.ArgumentParser.ParameterDescription;
import opennlp.tools.cmdline.params.EncodingParameter;
/**
 * Params for the Dictionary tools.
 *
 * Note: Do not use this class, internal use only!
 */
interface DictionaryBuilderParams extends EncodingParameter {
// Plain-text source file containing one dictionary entry per line.
@ParameterDescription(valueName = "in", description = "Plain file with one entry per line")
File getInputFile();
// Destination file for the serialized dictionary.
@ParameterDescription(valueName = "out", description = "The dictionary file.")
File getOutputFile();
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/dictionary/DictionaryBuilderTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.dictionary;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.nio.charset.Charset;
import opennlp.tools.cmdline.BasicCmdLineTool;
import opennlp.tools.cmdline.CmdLineUtil;
import opennlp.tools.cmdline.TerminateToolException;
import opennlp.tools.dictionary.Dictionary;
/**
 * Command line tool that builds a serialized OpenNLP dictionary from a plain
 * text file containing one entry per line.
 */
public class DictionaryBuilderTool extends BasicCmdLineTool {

  /** Marker interface binding this tool to the shared dictionary parameters. */
  interface Params extends DictionaryBuilderParams {
  }

  public String getShortDescription() {
    return "builds a new dictionary";
  }

  public String getHelp() {
    return getBasicHelp(Params.class);
  }

  /**
   * Parses the arguments, validates the input/output files and serializes the
   * parsed dictionary to the output file.
   *
   * @param args command line arguments; see {@link DictionaryBuilderParams}
   */
  public void run(String[] args) {
    Params params = validateAndParseParams(args, Params.class);

    File inFile = params.getInputFile();
    File outFile = params.getOutputFile();
    Charset encoding = params.getEncoding();

    CmdLineUtil.checkInputFile("dictionary input file", inFile);
    CmdLineUtil.checkOutputFile("dictionary output file", outFile);

    // Both streams are closed automatically, even when parsing fails.
    try (InputStreamReader in = new InputStreamReader(new FileInputStream(inFile), encoding);
        OutputStream out = new FileOutputStream(outFile)) {
      Dictionary.parseOneEntryPerLine(in).serialize(out);
    } catch (IOException e) {
      throw new TerminateToolException(-1, "IO error while reading training data or indexing data: "
          + e.getMessage(), e);
    }
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/doccat/DoccatConverterTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.doccat;
import opennlp.tools.cmdline.AbstractConverterTool;
import opennlp.tools.doccat.DocumentSample;
// Tool to convert multiple data formats into native OpenNLP document
// categorizer training format; conversion logic is inherited from
// AbstractConverterTool.
public class DoccatConverterTool extends AbstractConverterTool<DocumentSample> {
public DoccatConverterTool() {
super(DocumentSample.class);
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/doccat/DoccatCrossValidatorTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.doccat;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.LinkedList;
import java.util.List;
import opennlp.tools.cmdline.AbstractCrossValidatorTool;
import opennlp.tools.cmdline.CmdLineUtil;
import opennlp.tools.cmdline.TerminateToolException;
import opennlp.tools.cmdline.doccat.DoccatCrossValidatorTool.CVToolParams;
import opennlp.tools.cmdline.params.CVParams;
import opennlp.tools.cmdline.params.FineGrainedEvaluatorParams;
import opennlp.tools.doccat.DoccatCrossValidator;
import opennlp.tools.doccat.DoccatEvaluationMonitor;
import opennlp.tools.doccat.DoccatFactory;
import opennlp.tools.doccat.DocumentSample;
import opennlp.tools.doccat.FeatureGenerator;
import opennlp.tools.util.eval.EvaluationMonitor;
import opennlp.tools.util.model.ModelUtil;
// K-fold cross validation tool for the learnable document categorizer.
public final class DoccatCrossValidatorTool extends
AbstractCrossValidatorTool<DocumentSample, CVToolParams> {
// Cross-validation options combined with doccat training parameters and
// fine-grained report options.
interface CVToolParams extends CVParams, TrainingParams, FineGrainedEvaluatorParams {
}
public DoccatCrossValidatorTool() {
super(DocumentSample.class, CVToolParams.class);
}
public String getShortDescription() {
return "K-fold cross validator for the learnable Document Categorizer";
}
public void run(String format, String[] args) {
// Parses the arguments and opens sampleStream (done by the superclass).
super.run(format, args);
// Load training parameters; fall back to defaults when none were provided.
mlParams = CmdLineUtil.loadTrainingParameters(params.getParams(), false);
if (mlParams == null) {
mlParams = ModelUtil.createDefaultTrainingParameters();
}
List<EvaluationMonitor<DocumentSample>> listeners = new LinkedList<>();
// When requested, print each misclassified document during evaluation.
if (params.getMisclassified()) {
listeners.add(new DoccatEvaluationErrorListener());
}
// When a report file is given, also collect a fine-grained evaluation
// report and write it to that file after evaluation.
DoccatFineGrainedReportListener reportListener = null;
File reportFile = params.getReportOutputFile();
OutputStream reportOutputStream = null;
if (reportFile != null) {
CmdLineUtil.checkOutputFile("Report Output File", reportFile);
try {
reportOutputStream = new FileOutputStream(reportFile);
reportListener = new DoccatFineGrainedReportListener(reportOutputStream);
listeners.add(reportListener);
} catch (FileNotFoundException e) {
// Converts the failure into a TerminateToolException (never returns).
throw createTerminationIOException(e);
}
}
FeatureGenerator[] featureGenerators = DoccatTrainerTool
.createFeatureGenerators(params.getFeatureGenerators());
DoccatEvaluationMonitor[] listenersArr = listeners
.toArray(new DoccatEvaluationMonitor[listeners.size()]);
DoccatCrossValidator validator;
try {
DoccatFactory factory = DoccatFactory.create(params.getFactory(), featureGenerators);
validator = new DoccatCrossValidator(params.getLang(), mlParams,
factory, listenersArr);
// Trains and evaluates over the requested number of folds.
validator.evaluate(sampleStream, params.getFolds());
} catch (IOException e) {
throw new TerminateToolException(-1,
"IO error while reading training data or indexing data: " + e.getMessage(), e);
} finally {
try {
sampleStream.close();
} catch (IOException e) {
// sorry that this can fail
}
}
System.out.println("done");
if (reportListener != null) {
System.out.println("Writing fine-grained report to "
+ params.getReportOutputFile().getAbsolutePath());
reportListener.writeReport();
try {
// TODO: is it a problem to close the stream now?
reportOutputStream.close();
} catch (IOException e) {
// nothing to do
}
}
System.out.println();
System.out.println("Accuracy: " + validator.getDocumentAccuracy() + "\n" +
"Number of documents: " + validator.getDocumentCount());
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/doccat/DoccatEvaluationErrorListener.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.doccat;
import java.io.OutputStream;
import opennlp.tools.cmdline.EvaluationErrorPrinter;
import opennlp.tools.doccat.DoccatEvaluationMonitor;
import opennlp.tools.doccat.DocumentSample;
import opennlp.tools.util.eval.EvaluationMonitor;
/**
 * {@link EvaluationMonitor} implementation for the document categorizer that
 * writes each misclassified sample to an output stream.
 */
public class DoccatEvaluationErrorListener extends
    EvaluationErrorPrinter<DocumentSample> implements DoccatEvaluationMonitor {

  /**
   * Creates a listener that reports errors on {@link System#err}.
   */
  public DoccatEvaluationErrorListener() {
    this(System.err);
  }

  /**
   * Creates a listener that reports errors on the supplied {@link OutputStream}.
   */
  public DoccatEvaluationErrorListener(OutputStream outputStream) {
    super(outputStream);
  }

  @Override
  public void missclassified(DocumentSample reference, DocumentSample prediction) {
    // Delegate all formatting to the shared printer in the superclass.
    printError(reference, prediction);
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/doccat/DoccatEvaluatorTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.doccat;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.LinkedList;
import java.util.List;
import opennlp.tools.cmdline.AbstractEvaluatorTool;
import opennlp.tools.cmdline.CmdLineUtil;
import opennlp.tools.cmdline.PerformanceMonitor;
import opennlp.tools.cmdline.TerminateToolException;
import opennlp.tools.cmdline.doccat.DoccatEvaluatorTool.EvalToolParams;
import opennlp.tools.cmdline.params.EvaluatorParams;
import opennlp.tools.cmdline.params.FineGrainedEvaluatorParams;
import opennlp.tools.doccat.DoccatEvaluationMonitor;
import opennlp.tools.doccat.DoccatModel;
import opennlp.tools.doccat.DocumentCategorizerEvaluator;
import opennlp.tools.doccat.DocumentCategorizerME;
import opennlp.tools.doccat.DocumentSample;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.eval.EvaluationMonitor;
public final class DoccatEvaluatorTool extends
AbstractEvaluatorTool<DocumentSample, EvalToolParams> {
interface EvalToolParams extends EvaluatorParams, FineGrainedEvaluatorParams {
}
public DoccatEvaluatorTool() {
super(DocumentSample.class, EvalToolParams.class);
}
public String getShortDescription() {
return "Measures the performance of the Doccat model with the reference data";
}
public void run(String format, String[] args) {
super.run(format, args);
DoccatModel model = new DoccatModelLoader().load(params.getModel());
List<EvaluationMonitor<DocumentSample>> listeners = new LinkedList<>();
if (params.getMisclassified()) {
listeners.add(new DoccatEvaluationErrorListener());
}
DoccatFineGrainedReportListener reportListener = null;
File reportFile = params.getReportOutputFile();
OutputStream reportOutputStream = null;
if (reportFile != null) {
CmdLineUtil.checkOutputFile("Report Output File", reportFile);
try {
reportOutputStream = new FileOutputStream(reportFile);
reportListener = new DoccatFineGrainedReportListener(reportOutputStream);
listeners.add(reportListener);
} catch (FileNotFoundException e) {
throw new TerminateToolException(-1,
"IO error while creating Doccat fine-grained report file: "
+ e.getMessage());
}
}
DocumentCategorizerEvaluator evaluator = new DocumentCategorizerEvaluator(
new DocumentCategorizerME(model),
listeners.toArray(new DoccatEvaluationMonitor[listeners.size()]));
final PerformanceMonitor monitor = new PerformanceMonitor("doc");
ObjectStream<DocumentSample> measuredSampleStream = new ObjectStream<DocumentSample>() {
public DocumentSample read() throws IOException {
monitor.incrementCounter();
return sampleStream.read();
}
public void reset() throws IOException {
sampleStream.reset();
}
public void close() throws IOException {
sampleStream.close();
}
};
monitor.startAndPrintThroughput();
try {
evaluator.evaluate(measuredSampleStream);
} catch (IOException e) {
System.err.println("failed");
throw new TerminateToolException(-1, "IO error while reading test data: "
+ e.getMessage(), e);
} finally {
try {
measuredSampleStream.close();
} catch (IOException e) {
// sorry that this can fail
}
}
monitor.stopAndPrintFinalResult();
System.out.println();
System.out.println(evaluator);
if (reportListener != null) {
System.out.println("Writing fine-grained report to "
+ params.getReportOutputFile().getAbsolutePath());
reportListener.writeReport();
try {
// TODO: is it a problem to close the stream now?
reportOutputStream.close();
} catch (IOException e) {
// nothing to do
}
}
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/doccat/DoccatFineGrainedReportListener.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.doccat;
import java.io.OutputStream;
import opennlp.tools.cmdline.FineGrainedReportListener;
import opennlp.tools.doccat.DoccatEvaluationMonitor;
import opennlp.tools.doccat.DocumentSample;
/**
* Generates a detailed report for the POS Tagger.
* <p>
* It is possible to use it from an API and access the statistics using the
* provided getters
*/
public class DoccatFineGrainedReportListener
extends FineGrainedReportListener implements DoccatEvaluationMonitor {
/**
* Creates a listener that will print to {@link System#err}
*/
public DoccatFineGrainedReportListener() {
this(System.err);
}
/**
* Creates a listener that prints to a given {@link OutputStream}
*/
public DoccatFineGrainedReportListener(OutputStream outputStream) {
super(outputStream);
}
// methods inherited from EvaluationMonitor
public void missclassified(DocumentSample reference, DocumentSample prediction) {
statsAdd(reference, prediction);
}
public void correctlyClassified(DocumentSample reference, DocumentSample prediction) {
statsAdd(reference, prediction);
}
private void statsAdd(DocumentSample reference, DocumentSample prediction) {
getStats().add(reference.getText(), reference.getCategory(), prediction.getCategory());
}
public void writeReport() {
printGeneralStatistics();
printTagsErrorRank();
printGeneralConfusionTable();
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/doccat/DoccatModelLoader.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.doccat;
import java.io.IOException;
import java.io.InputStream;
import opennlp.tools.cmdline.ModelLoader;
import opennlp.tools.doccat.DoccatModel;
/**
 * Command line helper that reads a {@link DoccatModel} from disk.
 * <p>
 * <b>Note:</b> Do not use this class, internal use only!
 */
public class DoccatModelLoader extends ModelLoader<DoccatModel> {

  public DoccatModelLoader() {
    super("Document Categorizer");
  }

  @Override
  protected DoccatModel loadModel(InputStream in) throws IOException {
    // Deserialization is handled entirely by the DoccatModel constructor.
    return new DoccatModel(in);
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/doccat/DoccatTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.doccat;
import java.io.File;
import java.io.IOException;
import opennlp.tools.cmdline.BasicCmdLineTool;
import opennlp.tools.cmdline.CLI;
import opennlp.tools.cmdline.CmdLineUtil;
import opennlp.tools.cmdline.PerformanceMonitor;
import opennlp.tools.cmdline.SystemInputStreamFactory;
import opennlp.tools.doccat.DoccatModel;
import opennlp.tools.doccat.DocumentCategorizerME;
import opennlp.tools.doccat.DocumentSample;
import opennlp.tools.tokenize.WhitespaceTokenizer;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.ParagraphStream;
import opennlp.tools.util.PlainTextByLineStream;
/**
 * Command line tool that categorizes documents read from standard input with a
 * trained document categorizer model.
 */
public class DoccatTool extends BasicCmdLineTool {

  @Override
  public String getShortDescription() {
    return "learned document categorizer";
  }

  @Override
  public String getHelp() {
    return "Usage: " + CLI.CMD + " " + getName() + " model < documents";
  }

  /**
   * Loads the model given as the first argument, reads paragraphs from stdin,
   * and prints each categorized paragraph as a {@link DocumentSample}.
   */
  @Override
  public void run(String[] args) {
    if (0 == args.length) {
      System.out.println(getHelp());
    } else {
      DoccatModel model = new DoccatModelLoader().load(new File(args[0]));

      DocumentCategorizerME documentCategorizerME = new DocumentCategorizerME(model);

      PerformanceMonitor perfMon = new PerformanceMonitor(System.err, "doc");
      perfMon.start();

      // try-with-resources closes the stdin-backed stream even when reading fails
      try (ObjectStream<String> documentStream = new ParagraphStream(new PlainTextByLineStream(
          new SystemInputStreamFactory(), SystemInputStreamFactory.encoding()))) {

        String document;
        while ((document = documentStream.read()) != null) {
          String[] tokens = WhitespaceTokenizer.INSTANCE.tokenize(document);

          double[] prob = documentCategorizerME.categorize(tokens);
          String category = documentCategorizerME.getBestCategory(prob);

          DocumentSample sample = new DocumentSample(category, tokens);
          System.out.println(sample.toString());

          perfMon.incrementCounter();
        }
      } catch (IOException e) {
        CmdLineUtil.handleStdinIoError(e);
      }

      perfMon.stopAndPrintFinalResult();
    }
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/doccat/DoccatTrainerTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.doccat;
import java.io.File;
import java.io.IOException;
import opennlp.tools.cmdline.AbstractTrainerTool;
import opennlp.tools.cmdline.CmdLineUtil;
import opennlp.tools.cmdline.doccat.DoccatTrainerTool.TrainerToolParams;
import opennlp.tools.cmdline.params.TrainingToolParams;
import opennlp.tools.doccat.BagOfWordsFeatureGenerator;
import opennlp.tools.doccat.DoccatFactory;
import opennlp.tools.doccat.DoccatModel;
import opennlp.tools.doccat.DocumentCategorizerME;
import opennlp.tools.doccat.DocumentSample;
import opennlp.tools.doccat.FeatureGenerator;
import opennlp.tools.tokenize.Tokenizer;
import opennlp.tools.tokenize.WhitespaceTokenizer;
import opennlp.tools.util.ext.ExtensionLoader;
import opennlp.tools.util.model.ModelUtil;
public class DoccatTrainerTool
extends AbstractTrainerTool<DocumentSample, TrainerToolParams> {
interface TrainerToolParams extends TrainingParams, TrainingToolParams {
}
public DoccatTrainerTool() {
super(DocumentSample.class, TrainerToolParams.class);
}
@Override
public String getShortDescription() {
return "trainer for the learnable document categorizer";
}
@Override
public void run(String format, String[] args) {
super.run(format, args);
mlParams = CmdLineUtil.loadTrainingParameters(params.getParams(), false);
if (mlParams == null) {
mlParams = ModelUtil.createDefaultTrainingParameters();
}
File modelOutFile = params.getModel();
CmdLineUtil.checkOutputFile("document categorizer model", modelOutFile);
FeatureGenerator[] featureGenerators = createFeatureGenerators(params
.getFeatureGenerators());
DoccatModel model;
try {
DoccatFactory factory = DoccatFactory.create(params.getFactory(), featureGenerators);
model = DocumentCategorizerME.train(params.getLang(), sampleStream,
mlParams, factory);
} catch (IOException e) {
throw createTerminationIOException(e);
}
finally {
try {
sampleStream.close();
} catch (IOException e) {
// sorry that this can fail
}
}
CmdLineUtil.writeModel("document categorizer", modelOutFile, model);
}
static Tokenizer createTokenizer(String tokenizer) {
if (tokenizer != null) {
return ExtensionLoader.instantiateExtension(Tokenizer.class, tokenizer);
}
return WhitespaceTokenizer.INSTANCE;
}
static FeatureGenerator[] createFeatureGenerators(String featureGeneratorsNames) {
if (featureGeneratorsNames == null) {
return new FeatureGenerator[]{new BagOfWordsFeatureGenerator()};
}
String[] classes = featureGeneratorsNames.split(",");
FeatureGenerator[] featureGenerators = new FeatureGenerator[classes.length];
for (int i = 0; i < featureGenerators.length; i++) {
featureGenerators[i] = ExtensionLoader.instantiateExtension(
FeatureGenerator.class, classes[i]);
}
return featureGenerators;
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/doccat/TrainingParams.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.doccat;
import opennlp.tools.cmdline.ArgumentParser.OptionalParameter;
import opennlp.tools.cmdline.ArgumentParser.ParameterDescription;
import opennlp.tools.cmdline.params.BasicTrainingParams;
/**
 * TrainingParams for DocCat.
 *
 * Declares the doccat-specific training options on top of the shared
 * {@link BasicTrainingParams}; values are bound by the cmdline argument parser.
 *
 * Note: Do not use this class, internal use only!
 */
interface TrainingParams extends BasicTrainingParams {

  // Optional: comma-separated FeatureGenerator class names; null selects bag-of-words.
  @ParameterDescription(valueName = "fg",
      description = "Comma separated feature generator classes. Bag of words is used if not specified.")
  @OptionalParameter
  String getFeatureGenerators();

  // Optional: Tokenizer implementation class name; null selects WhitespaceTokenizer.
  @ParameterDescription(valueName = "tokenizer",
      description = "Tokenizer implementation. WhitespaceTokenizer is used if not specified.")
  @OptionalParameter
  String getTokenizer();

  // Optional: DoccatFactory subclass supplying implementation classes and resources.
  @ParameterDescription(valueName = "factoryName",
      description = "A sub-class of DoccatFactory where to get implementation and resources.")
  @OptionalParameter
  String getFactory();
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/entitylinker/EntityLinkerTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.entitylinker;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import opennlp.tools.cmdline.BasicCmdLineTool;
import opennlp.tools.cmdline.CLI;
import opennlp.tools.cmdline.CmdLineUtil;
import opennlp.tools.cmdline.PerformanceMonitor;
import opennlp.tools.cmdline.SystemInputStreamFactory;
import opennlp.tools.cmdline.TerminateToolException;
import opennlp.tools.entitylinker.EntityLinker;
import opennlp.tools.entitylinker.EntityLinkerFactory;
import opennlp.tools.entitylinker.EntityLinkerProperties;
import opennlp.tools.namefind.NameSample;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.PlainTextByLineStream;
import opennlp.tools.util.Span;
/**
 * Command line tool that links recognized entities to an external data set
 * using a configured {@link EntityLinker}.
 */
public class EntityLinkerTool extends BasicCmdLineTool {

  @Override
  public String getShortDescription() {
    return "links an entity to an external data set";
  }

  /**
   * Reads {@link NameSample} lines from stdin (documents are separated by
   * blank lines), runs the entity linker over each document, and prints the
   * linked spans.
   */
  @Override
  public void run(String[] args) {
    if (0 == args.length) {
      System.out.println(getHelp());
    }
    else {
      // TODO: Ask Mark if we can remove the type, the user knows upfront if he tries
      // to link place names or company mentions ...
      String entityType = "location";

      // Load the properties, they should contain everything that is necessary to instantiate
      // the component
      // TODO: Entity Linker Properties constructor should not duplicate code
      EntityLinkerProperties properties;
      try {
        properties = new EntityLinkerProperties(new File(args[0]));
      }
      catch (IOException e) {
        // Attach the cause so the underlying IO failure is visible in the trace.
        throw new TerminateToolException(-1, "Failed to load the properties file!", e);
      }

      // TODO: It should not just throw Exception.
      EntityLinker entityLinker;
      try {
        entityLinker = EntityLinkerFactory.getLinker(entityType, properties);
      }
      catch (Exception e) {
        throw new TerminateToolException(-1,
            "Failed to instantiate the Entity Linker: " + e.getMessage(), e);
      }

      PerformanceMonitor perfMon = new PerformanceMonitor(System.err, "sent");
      perfMon.start();

      try (ObjectStream<String> untokenizedLineStream = new PlainTextByLineStream(
          new SystemInputStreamFactory(), SystemInputStreamFactory.encoding())) {

        List<NameSample> document = new ArrayList<>();

        String line;
        while ((line = untokenizedLineStream.read()) != null) {
          if (line.trim().isEmpty()) {
            // Blank line: the buffered sentences form one document. Rebuild the
            // document text and character-offset spans for sentences, tokens,
            // and names, then run the linker over it.
            StringBuilder text = new StringBuilder();
            Span[] sentences = new Span[document.size()];
            Span[][] tokensBySentence = new Span[document.size()][];
            Span[][] namesBySentence = new Span[document.size()][];

            for (int i = 0; i < document.size(); i++) {
              NameSample sample = document.get(i);

              namesBySentence[i] = sample.getNames();

              int sentenceBegin = text.length();

              Span[] tokens = new Span[sample.getSentence().length];

              // for all tokens
              for (int ti = 0; ti < sample.getSentence().length; ti++) {
                int tokenBegin = text.length();
                text.append(sample.getSentence()[ti]);
                // Single space separator; its width is included in no token span.
                text.append(" ");
                tokens[ti] = new Span(tokenBegin, text.length());
              }

              tokensBySentence[i] = tokens;

              sentences[i] = new Span(sentenceBegin, text.length());
              text.append("\n");
            }

            List<Span> linkedSpans =
                entityLinker.find(text.toString(), sentences, tokensBySentence, namesBySentence);

            for (int i = 0; i < linkedSpans.size(); i++) {
              System.out.println(linkedSpans.get(i));
            }

            perfMon.incrementCounter(document.size());
            document.clear();
          }
          else {
            document.add(NameSample.parse(line, false));
          }
        }
      }
      catch (IOException e) {
        CmdLineUtil.handleStdinIoError(e);
      }

      perfMon.stopAndPrintFinalResult();
    }
  }

  @Override
  public String getHelp() {
    return "Usage: " + CLI.CMD + " " + getName() + " model < sentences";
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/langdetect/LanguageDetectorConverterTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.langdetect;
import opennlp.tools.cmdline.AbstractConverterTool;
import opennlp.tools.langdetect.LanguageSample;
/**
 * Command line tool that converts foreign corpus formats into the native
 * {@link LanguageSample} format. All conversion logic lives in
 * {@link AbstractConverterTool}; this class only binds the sample type.
 */
public class LanguageDetectorConverterTool extends AbstractConverterTool<LanguageSample> {
  public LanguageDetectorConverterTool() {
    super(LanguageSample.class);
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/langdetect/LanguageDetectorCrossValidatorTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.langdetect;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.LinkedList;
import java.util.List;
import opennlp.tools.cmdline.AbstractCrossValidatorTool;
import opennlp.tools.cmdline.CmdLineUtil;
import opennlp.tools.cmdline.TerminateToolException;
import opennlp.tools.cmdline.params.CVParams;
import opennlp.tools.cmdline.params.FineGrainedEvaluatorParams;
import opennlp.tools.langdetect.LanguageDetectorCrossValidator;
import opennlp.tools.langdetect.LanguageDetectorEvaluationMonitor;
import opennlp.tools.langdetect.LanguageDetectorFactory;
import opennlp.tools.langdetect.LanguageSample;
import opennlp.tools.util.eval.EvaluationMonitor;
import opennlp.tools.util.model.ModelUtil;
public final class LanguageDetectorCrossValidatorTool extends
AbstractCrossValidatorTool<LanguageSample,
LanguageDetectorCrossValidatorTool.CVToolParams> {
interface CVToolParams extends CVParams, TrainingParams, FineGrainedEvaluatorParams {
}
public LanguageDetectorCrossValidatorTool() {
super(LanguageSample.class, CVToolParams.class);
}
public String getShortDescription() {
return "K-fold cross validator for the learnable Language Detector";
}
public void run(String format, String[] args) {
super.run(format, args);
mlParams = CmdLineUtil.loadTrainingParameters(params.getParams(), false);
if (mlParams == null) {
mlParams = ModelUtil.createDefaultTrainingParameters();
}
List<EvaluationMonitor<LanguageSample>> listeners = new LinkedList<>();
if (params.getMisclassified()) {
listeners.add(new LanguageDetectorEvaluationErrorListener());
}
LanguageDetectorFineGrainedReportListener reportListener = null;
File reportFile = params.getReportOutputFile();
OutputStream reportOutputStream = null;
if (reportFile != null) {
CmdLineUtil.checkOutputFile("Report Output File", reportFile);
try {
reportOutputStream = new FileOutputStream(reportFile);
reportListener = new LanguageDetectorFineGrainedReportListener(reportOutputStream);
listeners.add(reportListener);
} catch (FileNotFoundException e) {
throw createTerminationIOException(e);
}
}
LanguageDetectorEvaluationMonitor[] listenersArr = listeners
.toArray(new LanguageDetectorEvaluationMonitor[listeners.size()]);
LanguageDetectorCrossValidator validator;
try {
LanguageDetectorFactory factory = LanguageDetectorFactory.create(params.getFactory());
validator = new LanguageDetectorCrossValidator(mlParams,
factory, listenersArr);
validator.evaluate(sampleStream, params.getFolds());
} catch (IOException e) {
throw new TerminateToolException(-1,
"IO error while reading training data or indexing data: " + e.getMessage(), e);
} finally {
try {
sampleStream.close();
} catch (IOException e) {
// sorry that this can fail
}
}
System.out.println("done");
if (reportListener != null) {
System.out.println("Writing fine-grained report to "
+ params.getReportOutputFile().getAbsolutePath());
reportListener.writeReport();
try {
// TODO: is it a problem to close the stream now?
reportOutputStream.close();
} catch (IOException e) {
// nothing to do
}
}
System.out.println();
System.out.println("Accuracy: " + validator.getDocumentAccuracy() + "\n" +
"Number of documents: " + validator.getDocumentCount());
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/langdetect/LanguageDetectorEvaluationErrorListener.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.langdetect;
import java.io.OutputStream;
import opennlp.tools.cmdline.EvaluationErrorPrinter;
import opennlp.tools.langdetect.LanguageDetectorEvaluationMonitor;
import opennlp.tools.langdetect.LanguageSample;
import opennlp.tools.util.eval.EvaluationMonitor;
/**
 * {@link EvaluationMonitor} implementation for the language detector that
 * writes each misclassified sample as a tab-separated row to an output stream.
 */
public class LanguageDetectorEvaluationErrorListener extends
    EvaluationErrorPrinter<LanguageSample> implements LanguageDetectorEvaluationMonitor {

  /**
   * Creates a listener that reports errors on {@link System#err}.
   */
  public LanguageDetectorEvaluationErrorListener() {
    super(System.err);
  }

  /**
   * Creates a listener that reports errors on the supplied {@link OutputStream},
   * emitting a tab-separated column header first.
   */
  public LanguageDetectorEvaluationErrorListener(OutputStream outputStream) {
    super(outputStream);
    // NOTE(review): only this constructor prints the header line; the
    // System.err constructor does not — confirm the asymmetry is intended.
    printStream.println("Expected\tPredicted\tContext");
  }

  @Override
  public void missclassified(LanguageSample reference, LanguageSample prediction) {
    printError(reference, prediction);
  }

  @Override
  protected void printError(LanguageSample referenceSample, LanguageSample predictedSample) {
    // One row: expected language, predicted language, and the sample context.
    String row = String.join("\t",
        referenceSample.getLanguage().getLang(),
        predictedSample.getLanguage().getLang(),
        referenceSample.getContext());
    printStream.println(row);
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/langdetect/LanguageDetectorEvaluatorTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.langdetect;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.LinkedList;
import java.util.List;
import opennlp.tools.cmdline.AbstractEvaluatorTool;
import opennlp.tools.cmdline.CmdLineUtil;
import opennlp.tools.cmdline.PerformanceMonitor;
import opennlp.tools.cmdline.TerminateToolException;
import opennlp.tools.cmdline.params.EvaluatorParams;
import opennlp.tools.cmdline.params.FineGrainedEvaluatorParams;
import opennlp.tools.langdetect.LanguageDetectorEvaluationMonitor;
import opennlp.tools.langdetect.LanguageDetectorEvaluator;
import opennlp.tools.langdetect.LanguageDetectorME;
import opennlp.tools.langdetect.LanguageDetectorModel;
import opennlp.tools.langdetect.LanguageSample;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.eval.EvaluationMonitor;
/**
 * Command line tool that evaluates a trained language detector model against
 * reference data and prints accuracy plus optional error/fine-grained reports.
 */
public final class LanguageDetectorEvaluatorTool extends
    AbstractEvaluatorTool<LanguageSample, LanguageDetectorEvaluatorTool.EvalToolParams> {

  /** Standard evaluator options plus the optional fine-grained report output file. */
  interface EvalToolParams extends EvaluatorParams, FineGrainedEvaluatorParams {
  }

  public LanguageDetectorEvaluatorTool() {
    super(LanguageSample.class, EvalToolParams.class);
  }

  public String getShortDescription() {
    return "Measures the performance of the Language Detector model with the reference data";
  }

  public void run(String format, String[] args) {
    // Superclass parses args into 'params' and opens 'sampleStream'.
    super.run(format, args);

    LanguageDetectorModel model = new LanguageDetectorModelLoader().load(params.getModel());

    List<EvaluationMonitor<LanguageSample>> listeners = new LinkedList<>();
    if (params.getMisclassified()) {
      // Prints each misclassified sample (expected/predicted/context) to stderr.
      listeners.add(new LanguageDetectorEvaluationErrorListener());
    }
    LanguageDetectorFineGrainedReportListener reportListener = null;
    File reportFile = params.getReportOutputFile();
    OutputStream reportOutputStream = null;
    if (reportFile != null) {
      CmdLineUtil.checkOutputFile("Report Output File", reportFile);
      try {
        reportOutputStream = new FileOutputStream(reportFile);
        reportListener = new LanguageDetectorFineGrainedReportListener(reportOutputStream);
        listeners.add(reportListener);
      } catch (FileNotFoundException e) {
        throw new TerminateToolException(-1,
            "IO error while creating LanguageDetector fine-grained report file: "
                + e.getMessage());
      }
    }

    LanguageDetectorEvaluator evaluator = new LanguageDetectorEvaluator(
        new LanguageDetectorME(model),
        listeners.toArray(new LanguageDetectorEvaluationMonitor[listeners.size()]));

    final PerformanceMonitor monitor = new PerformanceMonitor("doc");

    // Decorate the sample stream so the monitor counts each document read;
    // reset/close are delegated unchanged to the underlying sampleStream.
    ObjectStream<LanguageSample> measuredSampleStream = new ObjectStream<LanguageSample>() {
      public LanguageSample read() throws IOException {
        monitor.incrementCounter();
        return sampleStream.read();
      }
      public void reset() throws IOException {
        sampleStream.reset();
      }
      public void close() throws IOException {
        sampleStream.close();
      }
    };

    monitor.startAndPrintThroughput();

    try {
      evaluator.evaluate(measuredSampleStream);
    } catch (IOException e) {
      System.err.println("failed");
      throw new TerminateToolException(-1, "IO error while reading test data: "
          + e.getMessage(), e);
    } finally {
      try {
        measuredSampleStream.close();
      } catch (IOException e) {
        // sorry that this can fail
      }
    }

    monitor.stopAndPrintFinalResult();

    System.out.println();
    System.out.println(evaluator);

    // NOTE(review): if evaluate() throws, reportOutputStream is never closed
    // (only the tool's process exit releases it) — confirm this is acceptable.
    if (reportListener != null) {
      System.out.println("Writing fine-grained report to "
          + params.getReportOutputFile().getAbsolutePath());
      reportListener.writeReport();
      try {
        // TODO: is it a problem to close the stream now?
        reportOutputStream.close();
      } catch (IOException e) {
        // nothing to do
      }
    }
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/langdetect/LanguageDetectorFineGrainedReportListener.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.langdetect;
import java.io.OutputStream;
import opennlp.tools.cmdline.FineGrainedReportListener;
import opennlp.tools.langdetect.LanguageDetectorEvaluationMonitor;
import opennlp.tools.langdetect.LanguageSample;
/**
 * Generates a detailed report for the Language Detector.
 * <p>
 * It is possible to use it from an API and access the statistics using the
 * provided getters.
 */
public class LanguageDetectorFineGrainedReportListener
    extends FineGrainedReportListener implements LanguageDetectorEvaluationMonitor {

  /**
   * Creates a listener that will print to {@link System#err}
   */
  public LanguageDetectorFineGrainedReportListener() {
    this(System.err);
  }

  /**
   * Creates a listener that prints to a given {@link OutputStream}
   */
  public LanguageDetectorFineGrainedReportListener(OutputStream outputStream) {
    super(outputStream);
  }

  // methods inherited from EvaluationMonitor

  // Both correct and incorrect classifications are recorded so the report
  // reflects the full confusion statistics, not only the errors.
  public void missclassified(LanguageSample reference, LanguageSample prediction) {
    statsAdd(reference, prediction);
  }

  public void correctlyClassified(LanguageSample reference, LanguageSample prediction) {
    statsAdd(reference, prediction);
  }

  // Records one sample: the document text, the expected language code and the
  // predicted language code.
  private void statsAdd(LanguageSample reference, LanguageSample prediction) {
    getStats().add(reference.getContext(),
        reference.getLanguage().getLang(), prediction.getLanguage().getLang());
  }

  /** Writes the accumulated statistics (error rank and confusion table) to the stream. */
  public void writeReport() {
    printGeneralStatistics();
    printTagsErrorRank();
    printGeneralConfusionTable();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/langdetect/LanguageDetectorModelLoader.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.langdetect;
import java.io.IOException;
import java.io.InputStream;
import opennlp.tools.cmdline.ModelLoader;
import opennlp.tools.langdetect.LanguageDetectorModel;
/**
 * Loads a Language Detector Model for the command line tools.
 * <p>
 * <b>Note:</b> Do not use this class, internal use only!
 */
public class LanguageDetectorModelLoader extends ModelLoader<LanguageDetectorModel> {

  public LanguageDetectorModelLoader() {
    // Display name used by the superclass in load-related messages.
    super("Language Detector");
  }

  @Override
  protected LanguageDetectorModel loadModel(InputStream modelIn) throws IOException {
    final LanguageDetectorModel model = new LanguageDetectorModel(modelIn);
    return model;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/langdetect/LanguageDetectorTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.langdetect;
import java.io.File;
import java.io.IOException;
import opennlp.tools.cmdline.BasicCmdLineTool;
import opennlp.tools.cmdline.CLI;
import opennlp.tools.cmdline.CmdLineUtil;
import opennlp.tools.cmdline.PerformanceMonitor;
import opennlp.tools.cmdline.SystemInputStreamFactory;
import opennlp.tools.langdetect.Language;
import opennlp.tools.langdetect.LanguageDetector;
import opennlp.tools.langdetect.LanguageDetectorME;
import opennlp.tools.langdetect.LanguageDetectorModel;
import opennlp.tools.langdetect.LanguageSample;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.ParagraphStream;
import opennlp.tools.util.PlainTextByLineStream;
/**
 * Command line tool that reads documents (one paragraph each) from stdin and
 * prints the language predicted for every document.
 */
public class LanguageDetectorTool extends BasicCmdLineTool {

  @Override
  public String getShortDescription() {
    return "learned language detector";
  }

  @Override
  public String getHelp() {
    return "Usage: " + CLI.CMD + " " + getName() + " model < documents";
  }

  @Override
  public void run(String[] args) {
    if (args.length == 0) {
      System.out.println(getHelp());
      return;
    }

    LanguageDetectorModel model = new LanguageDetectorModelLoader().load(new File(args[0]));
    LanguageDetector detector = new LanguageDetectorME(model);

    /*
     * moved initialization to the try block to catch new IOException
     */
    ObjectStream<String> paragraphs;

    PerformanceMonitor monitor = new PerformanceMonitor(System.err, "doc");
    monitor.start();

    try {
      paragraphs = new ParagraphStream(new PlainTextByLineStream(
          new SystemInputStreamFactory(), SystemInputStreamFactory.encoding()));

      String doc;
      while ((doc = paragraphs.read()) != null) {
        Language predicted = detector.predictLanguage(doc);
        System.out.println(new LanguageSample(predicted, doc).toString());
        monitor.incrementCounter();
      }
    } catch (IOException e) {
      CmdLineUtil.handleStdinIoError(e);
    }

    monitor.stopAndPrintFinalResult();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/langdetect/LanguageDetectorTrainerTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.langdetect;
import java.io.File;
import java.io.IOException;
import opennlp.tools.cmdline.AbstractTrainerTool;
import opennlp.tools.cmdline.ArgumentParser;
import opennlp.tools.cmdline.CmdLineUtil;
import opennlp.tools.langdetect.LanguageDetectorFactory;
import opennlp.tools.langdetect.LanguageDetectorME;
import opennlp.tools.langdetect.LanguageDetectorModel;
import opennlp.tools.langdetect.LanguageSample;
import opennlp.tools.util.model.ModelUtil;
/**
 * Command line tool that trains a language detector model from a sample stream
 * and writes the resulting model to a file.
 */
public class LanguageDetectorTrainerTool
    extends AbstractTrainerTool<LanguageSample, LanguageDetectorTrainerTool.TrainerToolParams> {

  interface TrainerToolParams extends TrainingParams {

    /** Output file the trained model is written to. */
    @ArgumentParser.ParameterDescription(valueName = "modelFile", description = "output model file.")
    File getModel();

    // NOTE(review): getParams() is already declared in TrainingParams with the
    // same annotations; this redeclaration looks redundant — confirm before removing.
    @ArgumentParser.ParameterDescription(valueName = "paramsFile", description = "training parameters file.")
    @ArgumentParser.OptionalParameter()
    String getParams();
  }

  public LanguageDetectorTrainerTool() {
    super(LanguageSample.class, TrainerToolParams.class);
  }

  @Override
  public String getShortDescription() {
    return "trainer for the learnable language detector";
  }

  @Override
  public void run(String format, String[] args) {
    // Superclass parses args into 'params' and opens 'sampleStream'.
    super.run(format, args);

    // Use the params file when given, otherwise fall back to default training parameters.
    mlParams = CmdLineUtil.loadTrainingParameters(params.getParams(), false);
    if (mlParams == null) {
      mlParams = ModelUtil.createDefaultTrainingParameters();
    }

    File modelOutFile = params.getModel();
    CmdLineUtil.checkOutputFile("language detector model", modelOutFile);

    LanguageDetectorModel model;
    try {
      LanguageDetectorFactory factory = LanguageDetectorFactory.create(params.getFactory());
      model = LanguageDetectorME.train(sampleStream, mlParams, factory);
    } catch (IOException e) {
      throw createTerminationIOException(e);
    }
    finally {
      try {
        sampleStream.close();
      } catch (IOException e) {
        // sorry that this can fail
      }
    }

    CmdLineUtil.writeModel("language detector", modelOutFile, model);
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/langdetect/TrainingParams.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.langdetect;
import opennlp.tools.cmdline.ArgumentParser.OptionalParameter;
import opennlp.tools.cmdline.ArgumentParser.ParameterDescription;
/**
 * TrainingParams for Language Detector.
 *
 * Note: Do not use this class, internal use only!
 */
interface TrainingParams {

  /** Optional path to a training parameters file; defaults are used when absent. */
  @ParameterDescription(valueName = "paramsFile", description = "training parameters file.")
  @OptionalParameter()
  String getParams();

  /** Optional fully qualified name of a LanguageDetectorFactory sub-class to instantiate. */
  @ParameterDescription(valueName = "factoryName",
      description = "A sub-class of LanguageDetectorFactory" +
          " where to get implementation and resources.")
  @OptionalParameter
  String getFactory();
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/languagemodel/NGramLanguageModelTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.languagemodel;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import opennlp.tools.cmdline.BasicCmdLineTool;
import opennlp.tools.cmdline.CLI;
import opennlp.tools.cmdline.CmdLineUtil;
import opennlp.tools.cmdline.PerformanceMonitor;
import opennlp.tools.cmdline.SystemInputStreamFactory;
import opennlp.tools.languagemodel.NGramLanguageModel;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.PlainTextByLineStream;
import opennlp.tools.util.StringList;
/**
 * Command line tool for {@link opennlp.tools.languagemodel.NGramLanguageModel}.
 */
public class NGramLanguageModelTool extends BasicCmdLineTool {

  @Override
  public String getShortDescription() {
    return "gives the probability and most probable next token(s) of a sequence of tokens in a " +
        "language model";
  }

  /**
   * Loads the n-gram language model from {@code args[0]}, then for each
   * whitespace-tokenized line of stdin prints the sequence probability and the
   * predicted next tokens.
   */
  @Override
  public void run(String[] args) {
    // Fix: guard against a missing model argument instead of failing with an
    // ArrayIndexOutOfBoundsException on args[0] (matches the other cmdline tools).
    if (args.length != 1) {
      System.out.println(getHelp());
      return;
    }
    File lmFile = new File(args[0]);
    // try-with-resources replaces the manual close in a finally block.
    try (FileInputStream stream = new FileInputStream(lmFile)) {
      NGramLanguageModel nGramLanguageModel = new NGramLanguageModel(stream);
      ObjectStream<String> lineStream;
      PerformanceMonitor perfMon = null;
      try {
        lineStream = new PlainTextByLineStream(new SystemInputStreamFactory(),
            SystemInputStreamFactory.encoding());
        perfMon = new PerformanceMonitor(System.err, "nglm");
        perfMon.start();
        String line;
        while ((line = lineStream.read()) != null) {
          double probability;
          StringList predicted;
          String[] tokens = line.split(" ");
          StringList sample = new StringList(tokens);
          try {
            // Skip lines the model cannot score, but keep processing input.
            probability = nGramLanguageModel.calculateProbability(sample);
            predicted = nGramLanguageModel.predictNextTokens(sample);
          } catch (Exception e) {
            System.err.println("Error:" + e.getLocalizedMessage());
            System.err.println(line);
            continue;
          }
          System.out.println(sample + " -> prob:" + probability + ", next:" + predicted);
          perfMon.incrementCounter();
        }
      } catch (IOException e) {
        CmdLineUtil.handleStdinIoError(e);
      }
      // Fix: guard against an NPE when stdin setup failed before perfMon was created.
      if (perfMon != null) {
        perfMon.stopAndPrintFinalResult();
      }
    } catch (java.io.IOException e) {
      // Model file could not be opened or read.
      System.err.println(e.getLocalizedMessage());
    }
  }

  @Override
  public String getHelp() {
    return "Usage: " + CLI.CMD + " " + getName() + " model";
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/lemmatizer/LemmaEvaluationErrorListener.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.lemmatizer;
import java.io.OutputStream;
import opennlp.tools.cmdline.EvaluationErrorPrinter;
import opennlp.tools.lemmatizer.LemmaSample;
import opennlp.tools.lemmatizer.LemmatizerEvaluationMonitor;
import opennlp.tools.util.eval.EvaluationMonitor;
/**
 * A default implementation of {@link EvaluationMonitor} that prints to an
 * output stream.
 */
public class LemmaEvaluationErrorListener extends
    EvaluationErrorPrinter<LemmaSample> implements LemmatizerEvaluationMonitor {

  /**
   * Creates a listener that will print to System.err
   */
  public LemmaEvaluationErrorListener() {
    super(System.err);
  }

  /**
   * Creates a listener that will print to a given {@link OutputStream}
   */
  public LemmaEvaluationErrorListener(OutputStream outputStream) {
    super(outputStream);
  }

  @Override
  public void missclassified(LemmaSample reference, LemmaSample prediction) {
    // Report the expected vs. predicted lemma sequences alongside the tokens.
    String[] expectedLemmas = reference.getLemmas();
    String[] predictedLemmas = prediction.getLemmas();
    printError(expectedLemmas, predictedLemmas, reference, prediction,
        reference.getTokens());
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/lemmatizer/LemmatizerEvaluatorTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.lemmatizer;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import opennlp.tools.cmdline.AbstractEvaluatorTool;
import opennlp.tools.cmdline.CmdLineUtil;
import opennlp.tools.cmdline.TerminateToolException;
import opennlp.tools.cmdline.params.EvaluatorParams;
import opennlp.tools.cmdline.params.FineGrainedEvaluatorParams;
import opennlp.tools.lemmatizer.LemmaSample;
import opennlp.tools.lemmatizer.LemmatizerEvaluationMonitor;
import opennlp.tools.lemmatizer.LemmatizerEvaluator;
import opennlp.tools.lemmatizer.LemmatizerModel;
/**
 * Command line tool that evaluates a trained lemmatizer model against
 * reference data and prints word accuracy plus optional error/fine-grained reports.
 */
public final class LemmatizerEvaluatorTool
    extends AbstractEvaluatorTool<LemmaSample, LemmatizerEvaluatorTool.EvalToolParams> {

  public LemmatizerEvaluatorTool() {
    super(LemmaSample.class, EvalToolParams.class);
  }

  public String getShortDescription() {
    return "Measures the performance of the Lemmatizer model with the reference data";
  }

  public void run(String format, String[] args) {
    // Superclass parses args into 'params' and opens 'sampleStream'.
    super.run(format, args);

    LemmatizerModel model = new LemmatizerModelLoader().load(params.getModel());

    LemmatizerEvaluationMonitor missclassifiedListener = null;
    if (params.getMisclassified()) {
      // Prints each misclassified sample to stderr.
      missclassifiedListener = new LemmaEvaluationErrorListener();
    }

    LemmatizerFineGrainedReportListener reportListener = null;
    File reportFile = params.getReportOutputFile();
    OutputStream reportOutputStream = null;
    if (reportFile != null) {
      CmdLineUtil.checkOutputFile("Report Output File", reportFile);
      try {
        reportOutputStream = new FileOutputStream(reportFile);
        reportListener = new LemmatizerFineGrainedReportListener(
            reportOutputStream);
      } catch (FileNotFoundException e) {
        throw new TerminateToolException(-1,
            "IO error while creating Lemmatizer fine-grained report file: "
                + e.getMessage());
      }
    }

    // NOTE(review): when neither listener was requested, nulls are passed here
    // as evaluation monitors — verify LemmatizerEvaluator tolerates null entries.
    LemmatizerEvaluator evaluator = new LemmatizerEvaluator(
        new opennlp.tools.lemmatizer.LemmatizerME(model),
        missclassifiedListener, reportListener);

    System.out.print("Evaluating ... ");
    try {
      evaluator.evaluate(sampleStream);
    } catch (IOException e) {
      System.err.println("failed");
      throw new TerminateToolException(-1,
          "IO error while reading test data: " + e.getMessage(), e);
    } finally {
      try {
        sampleStream.close();
      } catch (IOException e) {
        // sorry that this can fail
      }
    }
    System.out.println("done");

    if (reportListener != null) {
      System.out.println("Writing fine-grained report to "
          + params.getReportOutputFile().getAbsolutePath());
      reportListener.writeReport();
      try {
        // TODO: is it a problem to close the stream now?
        reportOutputStream.close();
      } catch (IOException e) {
        // nothing to do
      }
    }

    System.out.println();
    System.out.println("Accuracy: " + evaluator.getWordAccuracy());
  }

  /** Standard evaluator options plus the optional fine-grained report output file. */
  interface EvalToolParams extends EvaluatorParams, FineGrainedEvaluatorParams {
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/lemmatizer/LemmatizerFineGrainedReportListener.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.lemmatizer;
import java.io.OutputStream;
import opennlp.tools.cmdline.FineGrainedReportListener;
import opennlp.tools.lemmatizer.LemmaSample;
import opennlp.tools.lemmatizer.LemmatizerEvaluationMonitor;
/**
 * Generates a detailed report for the Lemmatizer.
 * <p>
 * It is possible to use it from an API and access the statistics using the
 * provided getters.
 *
 */
public class LemmatizerFineGrainedReportListener
    extends FineGrainedReportListener implements LemmatizerEvaluationMonitor {

  /**
   * Creates a listener that will print to {@link System#err}
   */
  public LemmatizerFineGrainedReportListener() {
    super(System.err);
  }

  /**
   * Creates a listener that prints to a given {@link OutputStream}
   */
  public LemmatizerFineGrainedReportListener(OutputStream outputStream) {
    super(outputStream);
  }

  // methods inherited from EvaluationMonitor

  // Both correct and incorrect classifications are recorded so the report
  // reflects the full confusion statistics, not only the errors.
  public void missclassified(LemmaSample reference, LemmaSample prediction) {
    statsAdd(reference, prediction);
  }

  public void correctlyClassified(LemmaSample reference, LemmaSample prediction) {
    statsAdd(reference, prediction);
  }

  private void statsAdd(LemmaSample reference, LemmaSample prediction) {
    // Fix: compare the predicted lemmas against the reference lemmas.
    // The previous code compared reference.getTags() with prediction.getTags(),
    // i.e. the POS-tag *inputs*, which are the same in both samples, so the
    // fine-grained report could never record lemmatization errors.
    getStats().add(reference.getTokens(), reference.getLemmas(), prediction.getLemmas());
  }

  /** Writes the accumulated token/tag statistics and confusion tables to the stream. */
  public void writeReport() {
    printGeneralStatistics();
    // token stats
    printTokenErrorRank();
    printTokenOcurrenciesRank();
    // tag stats
    printTagsErrorRank();
    // confusion tables
    printGeneralConfusionTable();
    printDetailedConfusionMatrix();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/lemmatizer/LemmatizerMETool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.lemmatizer;
import java.io.File;
import java.io.IOException;
import opennlp.tools.cmdline.BasicCmdLineTool;
import opennlp.tools.cmdline.CLI;
import opennlp.tools.cmdline.CmdLineUtil;
import opennlp.tools.cmdline.PerformanceMonitor;
import opennlp.tools.cmdline.SystemInputStreamFactory;
import opennlp.tools.lemmatizer.LemmaSample;
import opennlp.tools.lemmatizer.LemmatizerME;
import opennlp.tools.lemmatizer.LemmatizerModel;
import opennlp.tools.postag.POSSample;
import opennlp.tools.util.InvalidFormatException;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.PlainTextByLineStream;
/**
 * Command line tool that reads POS-tagged sentences from stdin, lemmatizes
 * them with a learnable model and prints the resulting lemma samples.
 */
public class LemmatizerMETool extends BasicCmdLineTool {

  public String getShortDescription() {
    return "learnable lemmatizer";
  }

  public String getHelp() {
    return "Usage: " + CLI.CMD + " " + getName() + " model < sentences";
  }

  public void run(String[] args) {
    if (args.length != 1) {
      System.out.println(getHelp());
      return;
    }

    LemmatizerModel model = new LemmatizerModelLoader().load(new File(args[0]));
    LemmatizerME lemmatizer = new LemmatizerME(model);

    ObjectStream<String> lines;
    PerformanceMonitor monitor = null;
    try {
      lines = new PlainTextByLineStream(new SystemInputStreamFactory(),
          SystemInputStreamFactory.encoding());
      monitor = new PerformanceMonitor(System.err, "sent");
      monitor.start();

      String line;
      while ((line = lines.read()) != null) {
        // Each input line must be a POS sample (token_tag pairs); bad lines
        // are reported and skipped.
        POSSample tagged;
        try {
          tagged = POSSample.parse(line);
        } catch (InvalidFormatException e) {
          System.err.println("Invalid format:");
          System.err.println(line);
          continue;
        }

        String[] lemmas = lemmatizer.lemmatize(tagged.getSentence(), tagged.getTags());
        System.out.println(new LemmaSample(tagged.getSentence(),
            tagged.getTags(), lemmas).toString());
        monitor.incrementCounter();
      }
    } catch (IOException e) {
      CmdLineUtil.handleStdinIoError(e);
    }

    monitor.stopAndPrintFinalResult();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/lemmatizer/LemmatizerModelLoader.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.lemmatizer;
import java.io.IOException;
import java.io.InputStream;
import opennlp.tools.cmdline.ModelLoader;
import opennlp.tools.lemmatizer.LemmatizerModel;
/**
 * Loads a Lemmatizer Model for the command line tools.
 * <p>
 * <b>Note:</b> Do not use this class, internal use only!
 */
public class LemmatizerModelLoader extends ModelLoader<LemmatizerModel> {

  public LemmatizerModelLoader() {
    // Display name used by the superclass in load-related messages.
    super("Lemmatizer");
  }

  @Override
  protected LemmatizerModel loadModel(InputStream modelIn) throws IOException {
    final LemmatizerModel model = new LemmatizerModel(modelIn);
    return model;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/lemmatizer/LemmatizerTrainerTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.lemmatizer;
import java.io.File;
import java.io.IOException;
import opennlp.tools.cmdline.AbstractTrainerTool;
import opennlp.tools.cmdline.CmdLineUtil;
import opennlp.tools.cmdline.lemmatizer.LemmatizerTrainerTool.TrainerToolParams;
import opennlp.tools.cmdline.params.TrainingToolParams;
import opennlp.tools.lemmatizer.LemmaSample;
import opennlp.tools.lemmatizer.LemmatizerFactory;
import opennlp.tools.lemmatizer.LemmatizerME;
import opennlp.tools.lemmatizer.LemmatizerModel;
import opennlp.tools.util.model.ModelUtil;
public class LemmatizerTrainerTool
extends AbstractTrainerTool<LemmaSample, TrainerToolParams> {
interface TrainerToolParams extends TrainingParams, TrainingToolParams {
}
public LemmatizerTrainerTool() {
super(LemmaSample.class, TrainerToolParams.class);
}
public String getName() {
return "LemmatizerTrainerME";
}
public String getShortDescription() {
return "trainer for the learnable lemmatizer";
}
public void run(String format, String[] args) {
super.run(format, args);
mlParams = CmdLineUtil.loadTrainingParameters(params.getParams(), false);
if (mlParams == null) {
mlParams = ModelUtil.createDefaultTrainingParameters();
}
File modelOutFile = params.getModel();
CmdLineUtil.checkOutputFile("lemmatizer model", modelOutFile);
LemmatizerModel model;
try {
LemmatizerFactory lemmatizerFactory = LemmatizerFactory
.create(params.getFactory());
model = LemmatizerME.train(params.getLang(), sampleStream, mlParams,
lemmatizerFactory);
} catch (IOException e) {
throw createTerminationIOException(e);
} finally {
try {
sampleStream.close();
} catch (IOException e) {
// sorry that this can fail
}
}
CmdLineUtil.writeModel("lemmatizer", modelOutFile, model);
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/lemmatizer/TrainingParams.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.lemmatizer;
import opennlp.tools.cmdline.ArgumentParser.OptionalParameter;
import opennlp.tools.cmdline.ArgumentParser.ParameterDescription;
import opennlp.tools.cmdline.params.BasicTrainingParams;
/**
* TrainingParams for the Lemmatizer.
*
* Note: Do not use this class, internal use only!
*/
interface TrainingParams extends BasicTrainingParams {
// Optional fully-qualified class name of a LemmatizerFactory subclass;
// when absent, the default factory is used.
@ParameterDescription(valueName = "factoryName",
description = "A sub-class of LemmatizerFactory where to get implementation and resources.")
@OptionalParameter
String getFactory();
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/lemmatizer/package-info.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Classes giving access to the opennlp.tools.lemmatizer functionalities.
*/
package opennlp.tools.cmdline.lemmatizer;
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/namefind/CensusDictionaryCreatorTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.namefind;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.Charset;
import opennlp.tools.cmdline.ArgumentParser.OptionalParameter;
import opennlp.tools.cmdline.ArgumentParser.ParameterDescription;
import opennlp.tools.cmdline.BasicCmdLineTool;
import opennlp.tools.cmdline.CmdLineUtil;
import opennlp.tools.cmdline.TerminateToolException;
import opennlp.tools.dictionary.Dictionary;
import opennlp.tools.formats.NameFinderCensus90NameStream;
import opennlp.tools.util.InputStreamFactory;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.StringList;
/**
* This tool helps create a loadable dictionary for the {@code NameFinder},
* from data collected from US Census data.
* <p>
* Data for the US Census and names can be found here for the 1990 Census:
* <br>
* <a href="http://www.census.gov/genealogy/names/names_files.html">www.census.gov</a>
*/
public class CensusDictionaryCreatorTool extends BasicCmdLineTool {
/**
* Create a list of expected parameters.
*/
interface Parameters {
@ParameterDescription(valueName = "code")
@OptionalParameter(defaultValue = "eng")
String getLang();
@ParameterDescription(valueName = "charsetName")
@OptionalParameter(defaultValue = "UTF-8")
String getEncoding();
@ParameterDescription(valueName = "censusDict")
String getCensusData();
@ParameterDescription(valueName = "dict")
String getDict();
}
public String getShortDescription() {
return "Converts 1990 US Census names into a dictionary";
}
public String getHelp() {
return getBasicHelp(Parameters.class);
}
/**
* Creates a dictionary.
*
* @param sampleStream stream of samples.
* @return a {@code Dictionary} class containing the name dictionary
* built from the input file.
* @throws IOException IOException
*/
public static Dictionary createDictionary(ObjectStream<StringList> sampleStream) throws IOException {
Dictionary mNameDictionary = new Dictionary(true);
StringList entry;
entry = sampleStream.read();
while (entry != null) {
if (!mNameDictionary.contains(entry)) {
mNameDictionary.put(entry);
}
entry = sampleStream.read();
}
return mNameDictionary;
}
public void run(String[] args) {
Parameters params = validateAndParseParams(args, Parameters.class);
File testData = new File(params.getCensusData());
File dictOutFile = new File(params.getDict());
CmdLineUtil.checkInputFile("Name data", testData);
CmdLineUtil.checkOutputFile("Dictionary file", dictOutFile);
InputStreamFactory sampleDataIn = CmdLineUtil.createInputStreamFactory(testData);
Dictionary mDictionary;
try (
ObjectStream<StringList> sampleStream = new NameFinderCensus90NameStream(
sampleDataIn, Charset.forName(params.getEncoding()))) {
System.out.println("Creating Dictionary...");
mDictionary = createDictionary(sampleStream);
} catch (IOException e) {
throw new TerminateToolException(-1, "IO error while reading training data or indexing data: "
+ e.getMessage(), e);
}
System.out.println("Saving Dictionary...");
try (OutputStream out = new FileOutputStream(dictOutFile)) {
mDictionary.serialize(out);
} catch (IOException e) {
throw new TerminateToolException(-1, "IO error while writing dictionary file: "
+ e.getMessage(), e);
}
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/namefind/NameEvaluationErrorListener.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.namefind;
import java.io.OutputStream;
import opennlp.tools.cmdline.EvaluationErrorPrinter;
import opennlp.tools.namefind.NameSample;
import opennlp.tools.namefind.TokenNameFinderEvaluationMonitor;
import opennlp.tools.util.eval.EvaluationMonitor;
/**
* A default implementation of {@link EvaluationMonitor} that prints
* to an output stream.
*
*/
public class NameEvaluationErrorListener extends
    EvaluationErrorPrinter<NameSample> implements TokenNameFinderEvaluationMonitor {

  /**
   * Creates a listener that will print to a given {@link OutputStream}
   */
  public NameEvaluationErrorListener(OutputStream outputStream) {
    super(outputStream);
  }

  /**
   * Creates a listener that will print to System.err
   */
  public NameEvaluationErrorListener() {
    this(System.err);
  }

  /**
   * Prints a report for a sample whose predicted names differ from the
   * reference names.
   */
  @Override
  public void missclassified(NameSample reference, NameSample prediction) {
    String[] sentenceTokens = reference.getSentence();
    printError(reference.getId(), reference.getNames(), prediction.getNames(),
        reference, prediction, sentenceTokens);
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/namefind/NameSampleCountersStream.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.namefind;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import opennlp.tools.namefind.NameSample;
import opennlp.tools.util.FilterObjectStream;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.Span;
/**
* Counts tokens, sentences and names by type
*/
public class NameSampleCountersStream
    extends FilterObjectStream<NameSample, NameSample> {

  // Number of samples (sentences) seen so far.
  private int sentenceCount;
  // Total number of tokens across all samples seen so far.
  private int tokenCount;
  // Number of name spans seen so far, keyed by span type.
  private Map<String, Integer> nameCounters = new HashMap<>();

  protected NameSampleCountersStream(ObjectStream<NameSample> samples) {
    super(samples);
  }

  /**
   * Reads the next sample from the underlying stream, updating the
   * sentence, token and per-type name counters as a side effect.
   *
   * @return the next sample, or {@code null} when the stream is exhausted
   */
  @Override
  public NameSample read() throws IOException {
    NameSample sample = samples.read();

    if (sample != null) {
      sentenceCount++;
      tokenCount += sample.getSentence().length;

      for (Span nameSpan : sample.getNames()) {
        Integer nameCounter = nameCounters.get(nameSpan.getType());
        if (nameCounter == null) {
          nameCounter = 0;
        }
        nameCounters.put(nameSpan.getType(), nameCounter + 1);
      }
    }

    return sample;
  }

  /**
   * Resets the underlying stream and clears all counters.
   */
  @Override
  public void reset() throws IOException, UnsupportedOperationException {
    super.reset();

    sentenceCount = 0;
    tokenCount = 0;
    nameCounters = new HashMap<>();
  }

  public int getSentenceCount() {
    return sentenceCount;
  }

  public int getTokenCount() {
    return tokenCount;
  }

  /** Returns an unmodifiable view of the per-type name counters. */
  public Map<String, Integer> getNameCounters() {
    return Collections.unmodifiableMap(nameCounters);
  }

  /**
   * Prints the collected counters to {@link System#out}.
   */
  public void printSummary() {
    System.out.println("Training data summary:");
    System.out.println("#Sentences: " + getSentenceCount());
    System.out.println("#Tokens: " + getTokenCount());

    // Fixed: the previous version also accumulated a total name count into
    // a local variable that was never printed or otherwise used.
    for (Map.Entry<String, Integer> counter : getNameCounters().entrySet()) {
      System.out.println("#" + counter.getKey() + " entities: " + counter.getValue());
    }
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/namefind/TokenNameFinderConverterTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.namefind;
import opennlp.tools.cmdline.AbstractConverterTool;
import opennlp.tools.namefind.NameSample;
/**
* Tool to convert multiple data formats into native OpenNLP name finder training
* format.
*/
public class TokenNameFinderConverterTool extends AbstractConverterTool<NameSample> {
// Registers NameSample as the target sample type; the supported input
// formats and the conversion logic come from AbstractConverterTool.
public TokenNameFinderConverterTool() {
super(NameSample.class);
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/namefind/TokenNameFinderCrossValidatorTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.namefind;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import opennlp.tools.cmdline.AbstractCrossValidatorTool;
import opennlp.tools.cmdline.CmdLineUtil;
import opennlp.tools.cmdline.TerminateToolException;
import opennlp.tools.cmdline.namefind.TokenNameFinderCrossValidatorTool.CVToolParams;
import opennlp.tools.cmdline.params.CVParams;
import opennlp.tools.cmdline.params.DetailedFMeasureEvaluatorParams;
import opennlp.tools.cmdline.params.FineGrainedEvaluatorParams;
import opennlp.tools.namefind.BilouCodec;
import opennlp.tools.namefind.BioCodec;
import opennlp.tools.namefind.NameSample;
import opennlp.tools.namefind.NameSampleTypeFilter;
import opennlp.tools.namefind.TokenNameFinderCrossValidator;
import opennlp.tools.namefind.TokenNameFinderEvaluationMonitor;
import opennlp.tools.namefind.TokenNameFinderFactory;
import opennlp.tools.util.InvalidFormatException;
import opennlp.tools.util.SequenceCodec;
import opennlp.tools.util.TrainingParameters;
import opennlp.tools.util.eval.EvaluationMonitor;
public final class TokenNameFinderCrossValidatorTool
extends AbstractCrossValidatorTool<NameSample, CVToolParams> {
interface CVToolParams extends TrainingParams, CVParams,
DetailedFMeasureEvaluatorParams, FineGrainedEvaluatorParams {
}
public TokenNameFinderCrossValidatorTool() {
super(NameSample.class, CVToolParams.class);
}
public String getShortDescription() {
return "K-fold cross validator for the learnable Name Finder";
}
public void run(String format, String[] args) {
super.run(format, args);
mlParams = CmdLineUtil.loadTrainingParameters(params.getParams(), true);
if (mlParams == null) {
mlParams = new TrainingParameters();
}
byte[] featureGeneratorBytes =
TokenNameFinderTrainerTool.openFeatureGeneratorBytes(params.getFeaturegen());
Map<String, Object> resources;
try {
resources = TokenNameFinderTrainerTool.loadResources(params.getResources(), params.getFeaturegen());
}
catch (IOException e) {
throw new TerminateToolException(-1,"IO error while loading resources", e);
}
if (params.getNameTypes() != null) {
String[] nameTypes = params.getNameTypes().split(",");
sampleStream = new NameSampleTypeFilter(nameTypes, sampleStream);
}
List<EvaluationMonitor<NameSample>> listeners = new LinkedList<>();
if (params.getMisclassified()) {
listeners.add(new NameEvaluationErrorListener());
}
TokenNameFinderDetailedFMeasureListener detailedFListener = null;
if (params.getDetailedF()) {
detailedFListener = new TokenNameFinderDetailedFMeasureListener();
listeners.add(detailedFListener);
}
String sequenceCodecImplName = params.getSequenceCodec();
if ("BIO".equals(sequenceCodecImplName)) {
sequenceCodecImplName = BioCodec.class.getName();
}
else if ("BILOU".equals(sequenceCodecImplName)) {
sequenceCodecImplName = BilouCodec.class.getName();
}
SequenceCodec<String> sequenceCodec =
TokenNameFinderFactory.instantiateSequenceCodec(sequenceCodecImplName);
TokenNameFinderFineGrainedReportListener reportListener = null;
File reportFile = params.getReportOutputFile();
OutputStream reportOutputStream = null;
if (reportFile != null) {
CmdLineUtil.checkOutputFile("Report Output File", reportFile);
try {
reportOutputStream = new FileOutputStream(reportFile);
reportListener = new TokenNameFinderFineGrainedReportListener(sequenceCodec,
reportOutputStream);
listeners.add(reportListener);
} catch (FileNotFoundException e) {
throw new TerminateToolException(-1,
"IO error while creating Name Finder fine-grained report file: "
+ e.getMessage());
}
}
TokenNameFinderFactory nameFinderFactory;
try {
nameFinderFactory = TokenNameFinderFactory.create(params.getFactory(),
featureGeneratorBytes, resources, sequenceCodec);
} catch (InvalidFormatException e) {
throw new TerminateToolException(-1, e.getMessage(), e);
}
TokenNameFinderCrossValidator validator;
try {
validator = new TokenNameFinderCrossValidator(params.getLang(),
params.getType(), mlParams, nameFinderFactory,
listeners.toArray(new TokenNameFinderEvaluationMonitor[listeners.size()]));
validator.evaluate(sampleStream, params.getFolds());
} catch (IOException e) {
throw createTerminationIOException(e);
} finally {
try {
sampleStream.close();
} catch (IOException e) {
// sorry that this can fail
}
}
System.out.println("done");
System.out.println();
if (reportFile != null) {
reportListener.writeReport();
}
if (detailedFListener == null) {
System.out.println(validator.getFMeasure());
} else {
System.out.println(detailedFListener.toString());
}
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/namefind/TokenNameFinderDetailedFMeasureListener.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.namefind;
import opennlp.tools.cmdline.DetailedFMeasureListener;
import opennlp.tools.namefind.NameSample;
import opennlp.tools.namefind.TokenNameFinderEvaluationMonitor;
import opennlp.tools.util.Span;
public class TokenNameFinderDetailedFMeasureListener extends
DetailedFMeasureListener<NameSample> implements
TokenNameFinderEvaluationMonitor {
/**
 * Extracts the name spans from a sample so the base class can compute
 * per-type precision/recall/F-measure statistics.
 */
@Override
protected Span[] asSpanArray(NameSample sample) {
return sample.getNames();
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/namefind/TokenNameFinderEvaluatorTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.namefind;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.LinkedList;
import java.util.List;
import opennlp.tools.cmdline.AbstractEvaluatorTool;
import opennlp.tools.cmdline.ArgumentParser.OptionalParameter;
import opennlp.tools.cmdline.ArgumentParser.ParameterDescription;
import opennlp.tools.cmdline.CmdLineUtil;
import opennlp.tools.cmdline.PerformanceMonitor;
import opennlp.tools.cmdline.TerminateToolException;
import opennlp.tools.cmdline.namefind.TokenNameFinderEvaluatorTool.EvalToolParams;
import opennlp.tools.cmdline.params.DetailedFMeasureEvaluatorParams;
import opennlp.tools.cmdline.params.EvaluatorParams;
import opennlp.tools.cmdline.params.FineGrainedEvaluatorParams;
import opennlp.tools.namefind.NameFinderME;
import opennlp.tools.namefind.NameSample;
import opennlp.tools.namefind.NameSampleTypeFilter;
import opennlp.tools.namefind.TokenNameFinderEvaluationMonitor;
import opennlp.tools.namefind.TokenNameFinderEvaluator;
import opennlp.tools.namefind.TokenNameFinderModel;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.eval.EvaluationMonitor;
public final class TokenNameFinderEvaluatorTool
extends AbstractEvaluatorTool<NameSample, EvalToolParams> {
interface EvalToolParams extends EvaluatorParams,
DetailedFMeasureEvaluatorParams, FineGrainedEvaluatorParams {
@OptionalParameter
@ParameterDescription(valueName = "types", description = "name types to use for evaluation")
String getNameTypes();
}
public TokenNameFinderEvaluatorTool() {
super(NameSample.class, EvalToolParams.class);
}
public String getShortDescription() {
return "Measures the performance of the NameFinder model with the reference data";
}
public void run(String format, String[] args) {
super.run(format, args);
TokenNameFinderModel model = new TokenNameFinderModelLoader().load(params.getModel());
List<EvaluationMonitor<NameSample>> listeners = new LinkedList<EvaluationMonitor<NameSample>>();
if (params.getMisclassified()) {
listeners.add(new NameEvaluationErrorListener());
}
TokenNameFinderDetailedFMeasureListener detailedFListener = null;
if (params.getDetailedF()) {
detailedFListener = new TokenNameFinderDetailedFMeasureListener();
listeners.add(detailedFListener);
}
TokenNameFinderFineGrainedReportListener reportListener = null;
File reportFile = params.getReportOutputFile();
OutputStream reportOutputStream = null;
if (reportFile != null) {
CmdLineUtil.checkOutputFile("Report Output File", reportFile);
try {
reportOutputStream = new FileOutputStream(reportFile);
reportListener = new TokenNameFinderFineGrainedReportListener(model.getSequenceCodec(),
reportOutputStream);
listeners.add(reportListener);
} catch (FileNotFoundException e) {
throw new TerminateToolException(-1,
"IO error while creating Name Finder fine-grained report file: "
+ e.getMessage());
}
}
if (params.getNameTypes() != null) {
String[] nameTypes = params.getNameTypes().split(",");
sampleStream = new NameSampleTypeFilter(nameTypes, sampleStream);
}
TokenNameFinderEvaluator evaluator = new TokenNameFinderEvaluator(
new NameFinderME(model),
listeners.toArray(new TokenNameFinderEvaluationMonitor[listeners.size()]));
final PerformanceMonitor monitor = new PerformanceMonitor("sent");
ObjectStream<NameSample> measuredSampleStream = new ObjectStream<NameSample>() {
public NameSample read() throws IOException {
monitor.incrementCounter();
return sampleStream.read();
}
public void reset() throws IOException {
sampleStream.reset();
}
public void close() throws IOException {
sampleStream.close();
}
};
monitor.startAndPrintThroughput();
try {
evaluator.evaluate(measuredSampleStream);
} catch (IOException e) {
System.err.println("failed");
throw new TerminateToolException(-1, "IO error while reading test data: " + e.getMessage(), e);
} finally {
try {
measuredSampleStream.close();
} catch (IOException e) {
// sorry that this can fail
}
}
monitor.stopAndPrintFinalResult();
System.out.println();
if (reportFile != null) {
reportListener.writeReport();
}
if (detailedFListener == null) {
System.out.println(evaluator.getFMeasure());
} else {
System.out.println(detailedFListener.toString());
}
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/namefind/TokenNameFinderFineGrainedReportListener.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.namefind;
import java.io.OutputStream;
import java.util.Comparator;
import java.util.Map;
import opennlp.tools.cmdline.FineGrainedReportListener;
import opennlp.tools.namefind.NameSample;
import opennlp.tools.namefind.TokenNameFinderEvaluationMonitor;
import opennlp.tools.util.SequenceCodec;
/**
* Generates a detailed report for the NameFinder.
* <p>
* It is possible to use it from an API and access the statistics using the
* provided getters
*/
public class TokenNameFinderFineGrainedReportListener
    extends FineGrainedReportListener implements TokenNameFinderEvaluationMonitor {

  // Codec used to turn name spans into per-token outcome tags.
  // Assigned only in the constructor, so declared final.
  private final SequenceCodec<String> sequenceCodec;

  /**
   * Creates a listener that will print to {@link System#err}
   */
  public TokenNameFinderFineGrainedReportListener(SequenceCodec<String> seqCodec) {
    this(seqCodec, System.err);
  }

  /**
   * Creates a listener that prints to a given {@link OutputStream}
   */
  public TokenNameFinderFineGrainedReportListener(SequenceCodec<String> seqCodec, OutputStream outputStream) {
    super(outputStream);
    this.sequenceCodec = seqCodec;
  }

  // methods inherited from EvaluationMonitor

  @Override
  public void missclassified(NameSample reference, NameSample prediction) {
    statsAdd(reference, prediction);
  }

  @Override
  public void correctlyClassified(NameSample reference,
      NameSample prediction) {
    statsAdd(reference, prediction);
  }

  // Encodes both samples to per-token tag sequences and feeds them to the
  // shared statistics accumulator.
  private void statsAdd(NameSample reference, NameSample prediction) {
    String[] refTags = sequenceCodec.encode(reference.getNames(), reference.getSentence().length);
    String[] predTags = sequenceCodec.encode(prediction.getNames(), prediction.getSentence().length);

    // we don' want it to compute token frequency, so we pass an array of empty strings instead
    // of tokens
    getStats().add(new String[reference.getSentence().length], refTags, predTags);
  }

  @Override
  public Comparator<String> getMatrixLabelComparator(Map<String, ConfusionMatrixLine> confusionMatrix) {
    return new GroupedMatrixLabelComparator(confusionMatrix);
  }

  @Override
  public Comparator<String> getLabelComparator(Map<String, Counter> map) {
    return new GroupedLabelComparator(map);
  }

  /** Writes the accumulated statistics and confusion table to the output stream. */
  public void writeReport() {
    printGeneralStatistics();
    printTagsErrorRank();
    printGeneralConfusionTable();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/namefind/TokenNameFinderModelLoader.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.namefind;
import java.io.IOException;
import java.io.InputStream;
import opennlp.tools.cmdline.ModelLoader;
import opennlp.tools.namefind.TokenNameFinderModel;
/**
* Loads a Token Name Finder Model for the command line tools.
* <p>
* <b>Note:</b> Do not use this class, internal use only!
*/
public final class TokenNameFinderModelLoader extends ModelLoader<TokenNameFinderModel> {
// Registers the tool name used by ModelLoader for progress/error messages.
public TokenNameFinderModelLoader() {
super("Token Name Finder");
}
/**
 * Deserializes a {@link TokenNameFinderModel} from the given stream.
 * The stream is not closed by this method.
 *
 * @param modelIn the stream to read the model from
 * @return the loaded model
 * @throws IOException if the model cannot be read or is invalid
 */
@Override
protected TokenNameFinderModel loadModel(InputStream modelIn) throws IOException {
return new TokenNameFinderModel(modelIn);
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/namefind/TokenNameFinderTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.namefind;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import opennlp.tools.cmdline.BasicCmdLineTool;
import opennlp.tools.cmdline.CLI;
import opennlp.tools.cmdline.CmdLineUtil;
import opennlp.tools.cmdline.PerformanceMonitor;
import opennlp.tools.cmdline.SystemInputStreamFactory;
import opennlp.tools.namefind.NameFinderME;
import opennlp.tools.namefind.NameSample;
import opennlp.tools.namefind.TokenNameFinder;
import opennlp.tools.namefind.TokenNameFinderModel;
import opennlp.tools.tokenize.WhitespaceTokenizer;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.PlainTextByLineStream;
import opennlp.tools.util.Span;
public final class TokenNameFinderTool extends BasicCmdLineTool {

  @Override
  public String getShortDescription() {
    return "learnable name finder";
  }

  @Override
  public String getHelp() {
    return "Usage: " + CLI.CMD + " " + getName() + " model1 model2 ... modelN < sentences";
  }

  /**
   * Runs every model given on the command line over sentences read from
   * standard input (one sentence per line). The names found by all models are
   * merged, overlapping spans are dropped, and each result is printed as a
   * {@link NameSample}.
   *
   * @param args one or more model file paths; usage is printed when empty
   */
  @Override
  public void run(String[] args) {
    if (args.length == 0) {
      System.out.println(getHelp());
    } else {
      NameFinderME[] nameFinders = new NameFinderME[args.length];
      for (int i = 0; i < nameFinders.length; i++) {
        TokenNameFinderModel model = new TokenNameFinderModelLoader().load(new File(args[i]));
        nameFinders[i] = new NameFinderME(model);
      }
      PerformanceMonitor perfMon = new PerformanceMonitor(System.err, "sent");
      perfMon.start();
      // try-with-resources ensures the stdin-backed stream is closed even on error
      try (ObjectStream<String> untokenizedLineStream = new PlainTextByLineStream(
          new SystemInputStreamFactory(), SystemInputStreamFactory.encoding())) {
        String line;
        while ((line = untokenizedLineStream.read()) != null) {
          String[] whitespaceTokenizerLine = WhitespaceTokenizer.INSTANCE.tokenize(line);
          // An empty line indicates a new document; adaptive data collected
          // for the previous document must be cleared.
          if (whitespaceTokenizerLine.length == 0) {
            for (NameFinderME nameFinder : nameFinders) {
              nameFinder.clearAdaptiveData();
            }
          }
          List<Span> names = new ArrayList<>();
          for (TokenNameFinder nameFinder : nameFinders) {
            Collections.addAll(names, nameFinder.find(whitespaceTokenizerLine));
          }
          // Drop intersecting spans, otherwise the NameSample is invalid.
          Span[] reducedNames = NameFinderME.dropOverlappingSpans(
              names.toArray(new Span[0]));
          NameSample nameSample = new NameSample(whitespaceTokenizerLine,
              reducedNames, false);
          System.out.println(nameSample.toString());
          perfMon.incrementCounter();
        }
      } catch (IOException e) {
        CmdLineUtil.handleStdinIoError(e);
      }
      perfMon.stopAndPrintFinalResult();
    }
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/namefind/TokenNameFinderTrainerTool.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.namefind;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import opennlp.tools.cmdline.AbstractTrainerTool;
import opennlp.tools.cmdline.CmdLineUtil;
import opennlp.tools.cmdline.TerminateToolException;
import opennlp.tools.cmdline.namefind.TokenNameFinderTrainerTool.TrainerToolParams;
import opennlp.tools.cmdline.params.TrainingToolParams;
import opennlp.tools.namefind.BilouCodec;
import opennlp.tools.namefind.BioCodec;
import opennlp.tools.namefind.NameSample;
import opennlp.tools.namefind.NameSampleTypeFilter;
import opennlp.tools.namefind.TokenNameFinderFactory;
import opennlp.tools.namefind.TokenNameFinderModel;
import opennlp.tools.util.InvalidFormatException;
import opennlp.tools.util.SequenceCodec;
import opennlp.tools.util.TrainingParameters;
import opennlp.tools.util.featuregen.GeneratorFactory;
import opennlp.tools.util.model.ArtifactSerializer;
import opennlp.tools.util.model.ModelUtil;
public final class TokenNameFinderTrainerTool
extends AbstractTrainerTool<NameSample, TrainerToolParams> {
interface TrainerToolParams extends TrainingParams, TrainingToolParams {
}
public TokenNameFinderTrainerTool() {
super(NameSample.class, TrainerToolParams.class);
}
public String getShortDescription() {
return "trainer for the learnable name finder";
}
static byte[] openFeatureGeneratorBytes(String featureGenDescriptorFile) {
if (featureGenDescriptorFile != null) {
return openFeatureGeneratorBytes(new File(featureGenDescriptorFile));
}
return null;
}
public static byte[] openFeatureGeneratorBytes(File featureGenDescriptorFile) {
byte[] featureGeneratorBytes = null;
// load descriptor file into memory
if (featureGenDescriptorFile != null) {
try (InputStream bytesIn = CmdLineUtil.openInFile(featureGenDescriptorFile)) {
featureGeneratorBytes = ModelUtil.read(bytesIn);
} catch (IOException e) {
throw new TerminateToolException(-1, "IO error while reading training data or indexing data: "
+ e.getMessage(), e);
}
}
return featureGeneratorBytes;
}
/**
* Load the resources, such as dictionaries, by reading the feature xml descriptor
* and looking into the directory passed as argument.
* @param resourcePath the directory in which the resources are to be found
* @param featureGenDescriptor the feature xml descriptor
* @return a map consisting of the file name of the resource and its corresponding Object
*/
public static Map<String, Object> loadResources(File resourcePath, File featureGenDescriptor)
throws IOException {
Map<String, Object> resources = new HashMap<>();
if (resourcePath != null) {
Map<String, ArtifactSerializer> artifactSerializers = new HashMap<>();
if (featureGenDescriptor != null) {
try (InputStream xmlDescriptorIn = CmdLineUtil.openInFile(featureGenDescriptor)) {
artifactSerializers.putAll(
GeneratorFactory.extractArtifactSerializerMappings(xmlDescriptorIn));
}
}
for (Map.Entry<String, ArtifactSerializer> serializerMapping : artifactSerializers.entrySet()) {
String resourceName = serializerMapping.getKey();
try (InputStream resourceIn = CmdLineUtil.openInFile(new File(resourcePath, resourceName))) {
resources.put(resourceName, serializerMapping.getValue().create(resourceIn));
}
}
}
return resources;
}
public void run(String format, String[] args) {
super.run(format, args);
mlParams = CmdLineUtil.loadTrainingParameters(params.getParams(), true);
if (mlParams == null) {
mlParams = new TrainingParameters();
}
File modelOutFile = params.getModel();
byte[] featureGeneratorBytes = openFeatureGeneratorBytes(params.getFeaturegen());
// TODO: Support Custom resources:
// Must be loaded into memory, or written to tmp file until descriptor
// is loaded which defines parses when model is loaded
Map<String, Object> resources;
try {
resources = loadResources(params.getResources(), params.getFeaturegen());
}
catch (IOException e) {
throw new TerminateToolException(-1, e.getMessage(), e);
}
CmdLineUtil.checkOutputFile("name finder model", modelOutFile);
if (params.getNameTypes() != null) {
String[] nameTypes = params.getNameTypes().split(",");
sampleStream = new NameSampleTypeFilter(nameTypes, sampleStream);
}
String sequenceCodecImplName = params.getSequenceCodec();
if ("BIO".equals(sequenceCodecImplName)) {
sequenceCodecImplName = BioCodec.class.getName();
}
else if ("BILOU".equals(sequenceCodecImplName)) {
sequenceCodecImplName = BilouCodec.class.getName();
}
SequenceCodec<String> sequenceCodec =
TokenNameFinderFactory.instantiateSequenceCodec(sequenceCodecImplName);
TokenNameFinderFactory nameFinderFactory;
try {
nameFinderFactory = TokenNameFinderFactory.create(params.getFactory(),
featureGeneratorBytes, resources, sequenceCodec);
} catch (InvalidFormatException e) {
throw new TerminateToolException(-1, e.getMessage(), e);
}
NameSampleCountersStream counters = new NameSampleCountersStream(sampleStream);
sampleStream = counters;
TokenNameFinderModel model;
try {
model = opennlp.tools.namefind.NameFinderME.train(
params.getLang(), params.getType(), sampleStream, mlParams,
nameFinderFactory);
}
catch (IOException e) {
throw createTerminationIOException(e);
}
finally {
try {
sampleStream.close();
} catch (IOException e) {
// sorry that this can fail
}
}
System.out.println();
counters.printSummary();
System.out.println();
CmdLineUtil.writeModel("name finder", modelOutFile, model);
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/namefind/TrainingParams.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.namefind;
import java.io.File;
import opennlp.tools.cmdline.ArgumentParser.OptionalParameter;
import opennlp.tools.cmdline.ArgumentParser.ParameterDescription;
import opennlp.tools.cmdline.params.BasicTrainingParams;
/**
* TrainingParameters for Name Finder.
*
* Note: Do not use this class, internal use only!
*/
interface TrainingParams extends BasicTrainingParams {
/** The "type" attribute recorded in the trained model; optional. */
@ParameterDescription(valueName = "modelType", description = "The type of the token name finder model")
@OptionalParameter
String getType();
/** Directory containing resources (e.g. dictionaries) referenced by the feature descriptor; optional. */
@ParameterDescription(valueName = "resourcesDir", description = "The resources directory")
@OptionalParameter
File getResources();
/** Feature generator XML descriptor file; optional. */
@ParameterDescription(valueName = "featuregenFile", description = "The feature generator descriptor file")
@OptionalParameter
File getFeaturegen();
/** Comma-separated name types to keep for training; optional. */
@OptionalParameter
@ParameterDescription(valueName = "types", description = "name types to use for training")
String getNameTypes();
/** Sequence codec used to encode name spans; defaults to the BIO codec. */
@OptionalParameter(defaultValue = "opennlp.tools.namefind.BioCodec")
@ParameterDescription(valueName = "codec", description = "sequence codec used to code name spans")
String getSequenceCodec();
/** Fully-qualified name of a TokenNameFinderFactory sub-class; optional. */
@ParameterDescription(valueName = "factoryName", description = "A sub-class of TokenNameFinderFactory")
@OptionalParameter
String getFactory();
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/params/BasicFormatParams.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.params;
import java.io.File;
import opennlp.tools.cmdline.ArgumentParser.ParameterDescription;
/**
* Common format parameters.
*/
public interface BasicFormatParams extends EncodingParameter {
/** The sample data to process; usually a file path. */
@ParameterDescription(valueName = "sampleData", description = "data to be used, usually a file name.")
File getData();
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/params/BasicTrainingParams.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.params;
import opennlp.tools.cmdline.ArgumentParser.OptionalParameter;
import opennlp.tools.cmdline.ArgumentParser.ParameterDescription;
/**
* Common training parameters.
*
* Note: Do not use this class, internal use only!
*/
public interface BasicTrainingParams extends LanguageParams {
/** Path to a training parameters file; optional. */
@ParameterDescription(valueName = "paramsFile", description = "training parameters file.")
@OptionalParameter()
String getParams();
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/params/CVParams.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.params;
import opennlp.tools.cmdline.ArgumentParser.OptionalParameter;
import opennlp.tools.cmdline.ArgumentParser.ParameterDescription;
/**
* Common cross validator parameters.
*
* Note: Do not use this class, internal use only!
*/
public interface CVParams {
/** Whether to print false negatives and false positives; defaults to false. */
@ParameterDescription(valueName = "true|false",
description = "if true will print false negatives and false positives.")
@OptionalParameter(defaultValue = "false")
Boolean getMisclassified();
/** Number of cross-validation folds; defaults to 10. */
@ParameterDescription(valueName = "num", description = "number of folds, default is 10.")
@OptionalParameter(defaultValue = "10")
Integer getFolds();
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/params/DetailedFMeasureEvaluatorParams.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.params;
import opennlp.tools.cmdline.ArgumentParser.OptionalParameter;
import opennlp.tools.cmdline.ArgumentParser.ParameterDescription;
/**
* EvaluatorParams for Chunker.
*
* Note: Do not use this class, internal use only!
*/
public interface DetailedFMeasureEvaluatorParams {
/** Whether to print detailed F-measure results; defaults to true. Deprecated, slated for removal. */
@ParameterDescription(valueName = "true|false",
description = "if true (default) will print detailed FMeasure results.")
@OptionalParameter(defaultValue = "true")
@Deprecated // this will be removed in 1.8.0
Boolean getDetailedF();
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/cmdline/params/DetokenizerParameter.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.cmdline.params;
import opennlp.tools.cmdline.ArgumentParser.ParameterDescription;
public interface DetokenizerParameter {
/** Path to the detokenizer dictionary file. */
@ParameterDescription(valueName = "dictionary",
description = "specifies the file with detokenizer dictionary.")
String getDetokenizer();
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.