index
int64 | repo_id
string | file_path
string | content
string |
|---|---|---|---|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/lemmatizer/DictionaryLemmatizer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.lemmatizer;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Lemmatize by simple dictionary lookup into a hashmap built from a file
 * containing, for each line, word\tabpostag\tablemma.
 * @version 2014-07-08
 */
public class DictionaryLemmatizer implements Lemmatizer {

  /**
   * Maps a (word, postag) key pair to the list of lemmas listed for that
   * pair in the dictionary file.
   */
  private final Map<List<String>, List<String>> dictMap = new HashMap<>();

  /**
   * Construct a hashmap from the input tab separated dictionary.
   *
   * The input file should have, for each line, word\tabpostag\tablemma.
   * Alternatively, if multiple lemmas are possible for each word,postag pair,
   * then the format should be word\tab\postag\tablemma01#lemma02#lemma03.
   *
   * @param dictionary the input dictionary via inputstream; the stream is
   *     read to the end but intentionally left open — the caller owns it
   * @throws IOException if reading the stream fails
   */
  public DictionaryLemmatizer(final InputStream dictionary) throws IOException {
    init(dictionary);
  }

  /**
   * Construct the dictionary from a tab separated dictionary file.
   *
   * @param dictionaryFile the dictionary file
   * @throws IOException if the file cannot be opened or read
   */
  public DictionaryLemmatizer(File dictionaryFile) throws IOException {
    try (InputStream in = new FileInputStream(dictionaryFile)) {
      init(in);
    }
  }

  /**
   * Construct the dictionary from a tab separated dictionary path.
   *
   * @param dictionaryFile the path of the dictionary file
   * @throws IOException if the file cannot be opened or read
   */
  public DictionaryLemmatizer(Path dictionaryFile) throws IOException {
    this(dictionaryFile.toFile());
  }

  /**
   * Loads all dictionary entries into {@link #dictMap}.
   * Lines with fewer than three tab separated fields are skipped so that a
   * single malformed line no longer aborts the whole load with an
   * ArrayIndexOutOfBoundsException.
   *
   * @param dictionary the dictionary stream (not closed here; caller owns it)
   * @throws IOException if reading fails
   */
  private void init(InputStream dictionary) throws IOException {
    // UTF-8 is specified explicitly so parsing does not depend on the
    // platform default charset.
    final BufferedReader breader = new BufferedReader(
        new InputStreamReader(dictionary, StandardCharsets.UTF_8));
    String line;
    while ((line = breader.readLine()) != null) {
      final String[] elems = line.split("\t");
      if (elems.length < 3) {
        // Malformed line: skip it rather than crash on elems[2] below.
        continue;
      }
      final String[] lemmas = elems[2].split("#");
      this.dictMap.put(Arrays.asList(elems[0], elems[1]), Arrays.asList(lemmas));
    }
  }

  /**
   * Get the Map containing the dictionary.
   *
   * @return dictMap the Map
   */
  public Map<List<String>, List<String>> getDictMap() {
    return this.dictMap;
  }

  /**
   * Get the dictionary keys (word and postag).
   * NOTE(review): the word is lowercased here but entries are stored as-is by
   * {@link #init}, so lookups only match dictionaries whose word column is
   * already lowercase — confirm against the dictionary format in use.
   *
   * @param word
   *          the surface form word
   * @param postag
   *          the assigned postag
   * @return returns the dictionary keys
   */
  private List<String> getDictKeys(final String word, final String postag) {
    // Arrays.asList has the same equals/hashCode contract as ArrayList, so
    // it is a valid HashMap key here.
    return Arrays.asList(word.toLowerCase(), postag);
  }

  /**
   * Lemmatizes every token of a sentence by dictionary lookup.
   *
   * @param tokens the tokens of the sentence
   * @param postags the postags assigned to the tokens
   * @return one lemma per token; "O" where no entry was found
   */
  public String[] lemmatize(final String[] tokens, final String[] postags) {
    List<String> lemmas = new ArrayList<>(tokens.length);
    for (int i = 0; i < tokens.length; i++) {
      lemmas.add(this.lemmatize(tokens[i], postags[i]));
    }
    return lemmas.toArray(new String[lemmas.size()]);
  }

  /**
   * Looks up every possible lemma for each token/postag pair.
   *
   * @param tokens the tokens of the sentence
   * @param posTags the postags assigned to the tokens
   * @return per token, the list of all lemmas found; ["O"] where none
   */
  public List<List<String>> lemmatize(final List<String> tokens, final List<String> posTags) {
    List<List<String>> allLemmas = new ArrayList<>(tokens.size());
    for (int i = 0; i < tokens.size(); i++) {
      allLemmas.add(this.getAllLemmas(tokens.get(i), posTags.get(i)));
    }
    return allLemmas;
  }

  /**
   * Lookup lemma in a dictionary. Outputs "O" if not found.
   *
   * @param word
   *          the token
   * @param postag
   *          the postag
   * @return the lemma
   */
  private String lemmatize(final String word, final String postag) {
    // lookup lemma as value of the map; only the first listed lemma is used
    final List<String> keyValues = this.dictMap.get(this.getDictKeys(word, postag));
    return (keyValues != null && !keyValues.isEmpty()) ? keyValues.get(0) : "O";
  }

  /**
   * Lookup every lemma for a word,pos tag in a dictionary. Outputs "O" if not
   * found.
   *
   * @param word
   *          the token
   * @param postag
   *          the postag
   * @return every lemma
   */
  private List<String> getAllLemmas(final String word, final String postag) {
    final List<String> lemmasList = new ArrayList<>();
    final List<String> keyValues = this.dictMap.get(this.getDictKeys(word, postag));
    if (keyValues != null && !keyValues.isEmpty()) {
      lemmasList.addAll(keyValues);
    } else {
      lemmasList.add("O");
    }
    return lemmasList;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/lemmatizer/LemmaSample.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.lemmatizer;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
/**
* Represents an lemmatized sentence.
*/
public class LemmaSample implements Serializable {
private List<String> tokens;
private List<String> tags;
private final List<String> lemmas;
/**
* Represents one lemma sample.
* @param tokens the token
* @param tags the postags
* @param lemmas the lemmas
*/
public LemmaSample(String[] tokens, String[] tags, String[] lemmas) {
validateArguments(tokens.length, tags.length, lemmas.length);
this.tokens = Collections.unmodifiableList(new ArrayList<>(Arrays.asList(tokens)));
this.tags = Collections.unmodifiableList(new ArrayList<>(Arrays.asList(tags)));
this.lemmas = Collections.unmodifiableList(new ArrayList<>(Arrays.asList(lemmas)));
}
/**
* Lemma Sample constructor.
* @param tokens the tokens
* @param tags the postags
* @param lemmas the lemmas
*/
public LemmaSample(List<String> tokens, List<String> tags, List<String> lemmas) {
validateArguments(tokens.size(), tags.size(), lemmas.size());
this.tokens = Collections.unmodifiableList(new ArrayList<>(tokens));
this.tags = Collections.unmodifiableList(new ArrayList<>(tags));
this.lemmas = Collections.unmodifiableList(new ArrayList<>(lemmas));
}
public String[] getTokens() {
return tokens.toArray(new String[tokens.size()]);
}
public String[] getTags() {
return tags.toArray(new String[tags.size()]);
}
public String[] getLemmas() {
return lemmas.toArray(new String[lemmas.size()]);
}
private void validateArguments(int tokensSize, int tagsSize, int lemmasSize)
throws IllegalArgumentException {
if (tokensSize != tagsSize || tagsSize != lemmasSize) {
throw new IllegalArgumentException(
"All arrays must have the same length: " +
"sentenceSize: " + tokensSize +
", tagsSize: " + tagsSize +
", predsSize: " + lemmasSize + "!");
}
}
@Override
public String toString() {
StringBuilder lemmaString = new StringBuilder();
for (int ci = 0; ci < lemmas.size(); ci++) {
lemmaString.append(tokens.get(ci)).append("\t").append(tags.get(ci))
.append("\t").append(lemmas.get(ci)).append("\n");
}
return lemmaString.toString();
}
@Override
public int hashCode() {
return Objects.hash(Arrays.hashCode(getTokens()), Arrays.hashCode(getTags()),
Arrays.hashCode(getLemmas()));
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj instanceof LemmaSample) {
LemmaSample a = (LemmaSample) obj;
return Arrays.equals(getTokens(), a.getTokens())
&& Arrays.equals(getTags(), a.getTags())
&& Arrays.equals(getLemmas(), a.getLemmas());
}
return false;
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/lemmatizer/LemmaSampleEventStream.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.lemmatizer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import opennlp.tools.ml.model.Event;
import opennlp.tools.util.AbstractEventStream;
import opennlp.tools.util.ObjectStream;
/**
* Class for creating an event stream out of data files for training a probabilistic lemmatizer.
*/
public class LemmaSampleEventStream extends AbstractEventStream<LemmaSample> {
private LemmatizerContextGenerator contextGenerator;
/**
* Creates a new event stream based on the specified data stream using the specified context generator.
* @param d The data stream for this event stream.
* @param cg The context generator which should be used in the creation of events for this event stream.
*/
public LemmaSampleEventStream(ObjectStream<LemmaSample> d, LemmatizerContextGenerator cg) {
super(d);
this.contextGenerator = cg;
}
protected Iterator<Event> createEvents(LemmaSample sample) {
if (sample != null) {
List<Event> events = new ArrayList<>();
String[] toksArray = sample.getTokens();
String[] tagsArray = sample.getTags();
String[] lemmasArray = LemmatizerME.encodeLemmas(toksArray,sample.getLemmas());
for (int ei = 0, el = sample.getTokens().length; ei < el; ei++) {
events.add(new Event(lemmasArray[ei],
contextGenerator.getContext(ei,toksArray,tagsArray,lemmasArray)));
}
return events.iterator();
}
else {
return Collections.emptyListIterator();
}
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/lemmatizer/LemmaSampleSequenceStream.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.lemmatizer;
import java.io.IOException;
import opennlp.tools.ml.model.AbstractModel;
import opennlp.tools.ml.model.Event;
import opennlp.tools.ml.model.Sequence;
import opennlp.tools.ml.model.SequenceStream;
import opennlp.tools.util.ObjectStream;
/**
 * Adapts a stream of {@link LemmaSample}s to the {@link SequenceStream}
 * interface so it can feed sequence trainers.
 */
public class LemmaSampleSequenceStream implements SequenceStream {

  private final ObjectStream<LemmaSample> samples;
  private final LemmatizerContextGenerator contextGenerator;

  /**
   * Creates a sequence stream over the given samples.
   *
   * @param samples the underlying sample stream
   * @param contextGenerator produces the predictive context per token
   */
  public LemmaSampleSequenceStream(ObjectStream<LemmaSample> samples,
      LemmatizerContextGenerator contextGenerator) {
    this.samples = samples;
    this.contextGenerator = contextGenerator;
  }

  /**
   * Reads the next sample and converts it into a {@link Sequence} holding one
   * event per token; returns null when the sample stream is exhausted.
   */
  @Override
  public Sequence read() throws IOException {
    LemmaSample sample = samples.read();
    if (sample == null) {
      return null;
    }
    String[] tokens = sample.getTokens();
    String[] posTags = sample.getTags();
    String[] lemmas = sample.getLemmas();
    Event[] events = new Event[tokens.length];
    for (int idx = 0; idx < tokens.length; idx++) {
      // it is safe to pass the tags as previous tags because
      // the context generator does not look for non predicted tags
      String[] context = contextGenerator.getContext(idx, tokens, posTags, lemmas);
      // NOTE(review): the outcome here is the POS tag, whereas
      // LemmaSampleEventStream uses encoded lemma classes — confirm this
      // is intentional.
      events[idx] = new Event(posTags[idx], context);
    }
    return new Sequence<>(events, sample);
  }

  @Override
  public Event[] updateContext(Sequence sequence, AbstractModel model) {
    // TODO: Should be implemented for Perceptron sequence learning ...
    return null;
  }

  @Override
  public void reset() throws IOException, UnsupportedOperationException {
    samples.reset();
  }

  @Override
  public void close() throws IOException {
    samples.close();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/lemmatizer/LemmaSampleStream.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.lemmatizer;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import opennlp.tools.util.FilterObjectStream;
import opennlp.tools.util.ObjectStream;
/**
 * Reads data for training and testing the lemmatizer. The format consists of:
 * word\tpostag\tlemma.
 * @version 2016-02-16
 */
public class LemmaSampleStream extends FilterObjectStream<String, LemmaSample> {

  public LemmaSampleStream(ObjectStream<String> samples) {
    super(samples);
  }

  /**
   * Reads one sentence — every line up to a blank line — and converts it into
   * a {@link LemmaSample}. Returns null once the underlying stream yields no
   * further tokens.
   */
  public LemmaSample read() throws IOException {
    List<String> tokens = new ArrayList<>();
    List<String> posTags = new ArrayList<>();
    List<String> lemmaPreds = new ArrayList<>();
    String line;
    while ((line = samples.read()) != null && !line.equals("")) {
      String[] fields = line.split("\t");
      if (fields.length == 3) {
        tokens.add(fields[0]);
        posTags.add(fields[1]);
        lemmaPreds.add(fields[2]);
      } else {
        // Tolerate bad input: report and keep reading the sentence.
        System.err.println("Skipping corrupt line: " + line);
      }
    }
    if (tokens.isEmpty()) {
      return null;
    }
    return new LemmaSample(tokens.toArray(new String[0]),
        posTags.toArray(new String[0]), lemmaPreds.toArray(new String[0]));
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/lemmatizer/Lemmatizer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.lemmatizer;
import java.util.List;
/**
 * The interface for lemmatizers: components that map a (token, POS tag) pair
 * to one or more lemmas.
 */
public interface Lemmatizer {
  /**
   * Generates lemmas for the word and postag returning the result in an array.
   * The two arrays are parallel: {@code tags[i]} is the POS tag of
   * {@code toks[i]}, and the returned array has one lemma per token.
   *
   * @param toks an array of the tokens
   * @param tags an array of the pos tags
   *
   * @return an array of possible lemmas for each token in the sequence.
   */
  String[] lemmatize(String[] toks, String[] tags);
  /**
   * Generates lemma tags for the words and postags, returning the result as a
   * list of every possible lemma for each token and postag.
   *
   * @param toks an array of the tokens
   * @param tags an array of the pos tags
   * @return a list of every possible lemma for each token in the sequence.
   */
  List<List<String>> lemmatize(List<String> toks, List<String> tags);
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/lemmatizer/LemmatizerContextGenerator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.lemmatizer;
import opennlp.tools.util.BeamSearchContextGenerator;
/**
 * Interface for the context generator used for probabilistic lemmatizer.
 */
public interface LemmatizerContextGenerator extends BeamSearchContextGenerator<String> {
  /**
   * Returns the contexts for lemmatizing of the specified index.
   * @param i The index of the token in the specified toks array for which the context should be constructed.
   * @param toks The tokens of the sentence. The <code>toString</code> methods of
   *     these objects should return the token text.
   * @param tags The POS tags for the specified tokens.
   * @param lemmas The previous decisions made in the tagging of this sequence.
   *     Only indices less than i will be examined.
   * @return An array of predictive contexts on which a model basis its decisions.
   */
  String[] getContext(int i, String[] toks, String[] tags, String[] lemmas);
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/lemmatizer/LemmatizerEvaluationMonitor.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.lemmatizer;
import opennlp.tools.util.eval.EvaluationMonitor;
/**
 * Interface for the lemmatizer evaluator. This is a marker interface that
 * binds {@link EvaluationMonitor} to the {@link LemmaSample} type; it
 * declares no methods of its own.
 * @version 2016-02-18
 *
 */
public interface LemmatizerEvaluationMonitor extends EvaluationMonitor<LemmaSample> {
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/lemmatizer/LemmatizerEvaluator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.lemmatizer;
import opennlp.tools.util.eval.Evaluator;
import opennlp.tools.util.eval.Mean;
/**
 * The {@link LemmatizerEvaluator} measures the performance of
 * the given {@link Lemmatizer} with the provided reference
 * {@link LemmaSample}s.
 */
public class LemmatizerEvaluator extends Evaluator<LemmaSample> {

  // Both set once; final for immutability and clarity.
  private final Lemmatizer lemmatizer;
  private final Mean wordAccuracy = new Mean();

  /**
   * Initializes the current instance.
   *
   * @param aLemmatizer a lemmatizer
   * @param listeners an array of evaluation listeners
   */
  public LemmatizerEvaluator(Lemmatizer aLemmatizer, LemmatizerEvaluationMonitor ... listeners) {
    super(listeners);
    this.lemmatizer = aLemmatizer;
  }

  /**
   * Evaluates the given reference {@link LemmaSample} object.
   *
   * This is done by tagging the sentence from the reference
   * {@link LemmaSample} with the {@link Lemmatizer}. The
   * tags are then used to update the word accuracy score.
   *
   * @param reference the reference {@link LemmaSample}.
   *
   * @return the predicted {@link LemmaSample}.
   */
  @Override
  protected LemmaSample processSample(LemmaSample reference) {
    String[] predictedLemmas = lemmatizer.lemmatize(reference.getTokens(), reference.getTags());
    String[] referenceLemmas = reference.getLemmas();
    for (int i = 0; i < referenceLemmas.length; i++) {
      // Score 1 for an exact lemma match, 0 otherwise.
      wordAccuracy.add(referenceLemmas[i].equals(predictedLemmas[i]) ? 1 : 0);
    }
    return new LemmaSample(reference.getTokens(), reference.getTags(), predictedLemmas);
  }

  /**
   * Retrieves the word accuracy.
   *
   * This is defined as:
   * word accuracy = correctly detected tags / total words
   *
   * @return the word accuracy
   */
  public double getWordAccuracy() {
    return wordAccuracy.mean();
  }

  /**
   * Retrieves the total number of words considered
   * in the evaluation.
   *
   * @return the word count
   */
  public long getWordCount() {
    return wordAccuracy.count();
  }

  /**
   * Represents this objects as human readable {@link String}.
   */
  @Override
  public String toString() {
    return "Accuracy:" + wordAccuracy.mean() +
        " Number of Samples: " + wordAccuracy.count();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/lemmatizer/LemmatizerFactory.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.lemmatizer;
import opennlp.tools.util.BaseToolFactory;
import opennlp.tools.util.InvalidFormatException;
import opennlp.tools.util.SequenceValidator;
import opennlp.tools.util.ext.ExtensionLoader;
public class LemmatizerFactory extends BaseToolFactory {

  /**
   * Creates a {@link LemmatizerFactory} that provides the default implementation
   * of the resources.
   */
  public LemmatizerFactory() {
  }

  /**
   * Instantiates a {@link LemmatizerFactory} subclass via the extension
   * loader, or the default factory when no subclass name is given.
   *
   * @param subclassName fully qualified name of the factory subclass, or null
   * @return the instantiated factory
   * @throws InvalidFormatException if the subclass cannot be instantiated
   */
  public static LemmatizerFactory create(String subclassName)
      throws InvalidFormatException {
    if (subclassName == null) {
      // will create the default factory
      return new LemmatizerFactory();
    }
    try {
      return ExtensionLoader.instantiateExtension(LemmatizerFactory.class, subclassName);
    } catch (Exception e) {
      // The cause travels inside the thrown exception; no need to also dump
      // the stack trace and message to stderr (double reporting).
      String msg = "Could not instantiate the " + subclassName
          + ". The initialization threw an exception.";
      throw new InvalidFormatException(msg, e);
    }
  }

  @Override
  public void validateArtifactMap() throws InvalidFormatException {
    // no additional artifacts
  }

  /** @return the default sequence validator for lemmatizer decoding */
  public SequenceValidator<String> getSequenceValidator() {
    return new DefaultLemmatizerSequenceValidator();
  }

  /** @return the default context generator for lemmatizer training/decoding */
  public LemmatizerContextGenerator getContextGenerator() {
    return new DefaultLemmatizerContextGenerator();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/lemmatizer/LemmatizerME.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.lemmatizer;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import opennlp.tools.ml.BeamSearch;
import opennlp.tools.ml.EventModelSequenceTrainer;
import opennlp.tools.ml.EventTrainer;
import opennlp.tools.ml.SequenceTrainer;
import opennlp.tools.ml.TrainerFactory;
import opennlp.tools.ml.TrainerFactory.TrainerType;
import opennlp.tools.ml.model.Event;
import opennlp.tools.ml.model.MaxentModel;
import opennlp.tools.ml.model.SequenceClassificationModel;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.Sequence;
import opennlp.tools.util.SequenceValidator;
import opennlp.tools.util.StringUtil;
import opennlp.tools.util.TrainingParameters;
/**
* A probabilistic lemmatizer. Tries to predict the induced permutation class
* for each word depending on its surrounding context. Based on
* Grzegorz Chrupała. 2008. Towards a Machine-Learning Architecture
* for Lexical Functional Grammar Parsing. PhD dissertation, Dublin City University.
* http://grzegorz.chrupala.me/papers/phd-single.pdf
*/
public class LemmatizerME implements Lemmatizer {
/** Number of candidate sequences requested by {@link #lemmatize(List, List)}. */
public static final int LEMMA_NUMBER = 29;
/** Beam size used when the model manifest does not specify one. */
public static final int DEFAULT_BEAM_SIZE = 3;
// Beam size actually in effect (manifest value or DEFAULT_BEAM_SIZE).
protected int beamSize;
// Result of the most recent decoding call; read back by probs().
private Sequence bestSequence;
// Sequence model plus its decoding helpers, all set in the constructor.
private SequenceClassificationModel<String> model;
private LemmatizerContextGenerator contextGenerator;
private SequenceValidator<String> sequenceValidator;
/**
 * Initializes the current instance with the provided model
 * and the default beam size of 3.
 *
 * @param model the model
 */
public LemmatizerME(LemmatizerModel model) {
  LemmatizerFactory factory = model.getFactory();
  // Beam size comes from the model manifest when present, else the default.
  int defaultBeamSize = LemmatizerME.DEFAULT_BEAM_SIZE;
  String beamSizeString = model.getManifestProperty(BeamSearch.BEAM_SIZE_PARAMETER);
  if (beamSizeString != null) {
    defaultBeamSize = Integer.parseInt(beamSizeString);
  }
  contextGenerator = factory.getContextGenerator();
  beamSize = defaultBeamSize;
  sequenceValidator = factory.getSequenceValidator();
  if (model.getLemmatizerSequenceModel() != null) {
    this.model = model.getLemmatizerSequenceModel();
  }
  else {
    // NOTE(review): this branch only runs when getLemmatizerSequenceModel()
    // returned null, so the cast below wraps a null MaxentModel in the
    // BeamSearch. Looks suspicious — confirm whether a different artifact
    // accessor was intended here.
    this.model = new opennlp.tools.ml.BeamSearch<>(beamSize,
        (MaxentModel) model.getLemmatizerSequenceModel(), 0);
  }
}
/**
 * Lemmatizes a sentence by predicting a Shortest Edit Script class per token
 * and decoding it against the token's surface form.
 *
 * @param toks the tokens of the sentence
 * @param tags the POS tags assigned to the tokens
 * @return one decoded lemma per input token
 */
@Override
public String[] lemmatize(String[] toks, String[] tags) {
  // Predict the SES class per token, then decode it into the lemma.
  return decodeLemmas(toks, predictSES(toks, tags));
}
/**
 * Decodes the {@link #LEMMA_NUMBER} best sequences for the sentence; the
 * i-th returned list holds the lemma of each token in the i-th best sequence.
 *
 * @param toks the tokens of the sentence
 * @param tags the POS tags assigned to the tokens
 * @return one list of decoded lemmas per candidate sequence
 */
@Override
public List<List<String>> lemmatize(List<String> toks, List<String> tags) {
  String[] tokenArr = toks.toArray(new String[toks.size()]);
  String[] tagArr = tags.toArray(new String[tags.size()]);
  List<List<String>> predictedLemmas = new ArrayList<>();
  for (String[] sequenceLemmas : predictLemmas(LEMMA_NUMBER, tokenArr, tagArr)) {
    predictedLemmas.add(Arrays.asList(sequenceLemmas));
  }
  return predictedLemmas;
}
/**
 * Predict Short Edit Script (automatically induced lemma class).
 * @param toks the array of tokens
 * @param tags the array of pos tags
 * @return an array containing the lemma classes
 */
public String[] predictSES(String[] toks, String[] tags) {
  // Keep the decoded sequence so probs() can report its probabilities.
  bestSequence = model.bestSequence(toks, new Object[] {tags}, contextGenerator, sequenceValidator);
  List<String> outcomes = bestSequence.getOutcomes();
  return outcomes.toArray(new String[outcomes.size()]);
}
/**
 * Predict all possible lemmas (using a default upper bound).
 * @param numLemmas the default number of lemmas
 * @param toks the tokens
 * @param tags the postags
 * @return a double array containing all possible lemmas for each token and postag pair
 */
public String[][] predictLemmas(int numLemmas, String[] toks, String[] tags) {
  Sequence[] sequences = model.bestSequences(numLemmas, toks, new Object[] {tags},
      contextGenerator, sequenceValidator);
  String[][] decoded = new String[sequences.length][];
  int idx = 0;
  for (Sequence seq : sequences) {
    // Decode each candidate sequence of SES classes into surface lemmas.
    List<String> outcomes = seq.getOutcomes();
    decoded[idx++] = decodeLemmas(toks, outcomes.toArray(new String[outcomes.size()]));
  }
  return decoded;
}
/**
 * Decodes the lemma from the word and the induced lemma class.
 * @param toks the array of tokens
 * @param preds the predicted lemma classes
 * @return the array of decoded lemmas
 */
public static String[] decodeLemmas(String[] toks, String[] preds) {
  String[] decoded = new String[toks.length];
  for (int i = 0; i < toks.length; i++) {
    // NOTE(review): lowercasing uses the default locale — confirm this
    // matches how the training data was encoded.
    String lemma = StringUtil.decodeShortestEditScript(toks[i].toLowerCase(), preds[i]);
    // An empty decoding is represented by the placeholder "_".
    decoded[i] = lemma.isEmpty() ? "_" : lemma;
  }
  return decoded;
}
/**
 * Encodes each (token, lemma) pair into its Shortest Edit Script class,
 * using "_" as the placeholder for an empty script.
 *
 * @param toks the array of tokens
 * @param lemmas the gold lemma of each token
 * @return the array of encoded lemma classes
 */
public static String[] encodeLemmas(String[] toks, String[] lemmas) {
  String[] encoded = new String[toks.length];
  for (int i = 0; i < toks.length; i++) {
    String ses = StringUtil.getShortestEditScript(toks[i], lemmas[i]);
    encoded[i] = ses.isEmpty() ? "_" : ses;
  }
  return encoded;
}
/**
 * Returns the {@link #DEFAULT_BEAM_SIZE} best lemma-class sequences for the
 * sentence.
 *
 * @param sentence the tokens of the sentence
 * @param tags the POS tags assigned to the tokens
 * @return the top-scoring sequences
 */
public Sequence[] topKSequences(String[] sentence, String[] tags) {
  Object[] additionalContext = new Object[] { tags };
  return model.bestSequences(DEFAULT_BEAM_SIZE, sentence, additionalContext,
      contextGenerator, sequenceValidator);
}
/**
 * Finds the k-best lemma-class sequences whose score exceeds the given minimum.
 *
 * @param sentence the tokens of the sentence
 * @param tags the POS tags, aligned with {@code sentence}
 * @param minSequenceScore sequences scoring at or below this value are discarded
 * @return the top-ranked sequences above the score threshold
 */
public Sequence[] topKSequences(String[] sentence, String[] tags, double minSequenceScore) {
  Object[] additionalContext = new Object[] { tags };
  return model.bestSequences(DEFAULT_BEAM_SIZE, sentence, additionalContext,
      minSequenceScore, contextGenerator, sequenceValidator);
}
/**
 * Populates the specified array with the probabilities of the last decoded sequence. The
 * sequence was determined based on the previous call to <code>lemmatize</code>. The
 * specified array should be at least as large as the number of tokens in the
 * previous call to <code>lemmatize</code>.
 *
 * NOTE(review): if no sequence has been decoded yet, {@code bestSequence} is still
 * null and this call will throw a NullPointerException — confirm callers always
 * lemmatize first.
 *
 * @param probs An array used to hold the probabilities of the last decoded sequence.
 */
public void probs(double[] probs) {
  bestSequence.getProbs(probs);
}
/**
 * Returns an array with the probabilities of the last decoded sequence. The
 * sequence was determined based on the previous call to <code>lemmatize</code>.
 * (The original javadoc referenced <code>chunk</code>, which appears to be a
 * copy-paste from ChunkerME; this class decodes lemmas.)
 * @return An array with the same number of probabilities as tokens were sent to
 *         <code>lemmatize</code> when it was last called.
 */
public double[] probs() {
  return bestSequence.getProbs();
}
/**
 * Trains a {@link LemmatizerModel} from the given samples.
 *
 * @param languageCode the ISO language code of the training material
 * @param samples the lemma samples to train on
 * @param trainParams the training parameters (algorithm, beam size, ...)
 * @param posFactory the factory supplying the context generator
 * @return the trained model, wrapping either a maxent or a sequence model
 * @throws IOException if reading the samples fails
 * @throws IllegalArgumentException if the configured trainer type is unsupported
 */
public static LemmatizerModel train(String languageCode,
    ObjectStream<LemmaSample> samples, TrainingParameters trainParams,
    LemmatizerFactory posFactory) throws IOException {
  int beamSize = trainParams.getIntParameter(BeamSearch.BEAM_SIZE_PARAMETER,
      LemmatizerME.DEFAULT_BEAM_SIZE);
  LemmatizerContextGenerator contextGenerator = posFactory.getContextGenerator();
  Map<String, String> manifestInfoEntries = new HashMap<>();
  TrainerType trainerType = TrainerFactory.getTrainerType(trainParams);
  MaxentModel maxentModel = null;
  SequenceClassificationModel<String> seqModel = null;
  if (TrainerType.EVENT_MODEL_TRAINER.equals(trainerType)) {
    // Plain event-based training.
    ObjectStream<Event> eventStream = new LemmaSampleEventStream(samples, contextGenerator);
    EventTrainer eventTrainer =
        TrainerFactory.getEventTrainer(trainParams, manifestInfoEntries);
    maxentModel = eventTrainer.train(eventStream);
  }
  else if (TrainerType.EVENT_MODEL_SEQUENCE_TRAINER.equals(trainerType)) {
    // Event-model sequence training (e.g. simple perceptron sequence trainer).
    LemmaSampleSequenceStream seqStream = new LemmaSampleSequenceStream(samples, contextGenerator);
    EventModelSequenceTrainer seqEventTrainer =
        TrainerFactory.getEventModelSequenceTrainer(trainParams, manifestInfoEntries);
    maxentModel = seqEventTrainer.train(seqStream);
  }
  else if (TrainerType.SEQUENCE_TRAINER.equals(trainerType)) {
    SequenceTrainer seqTrainer = TrainerFactory.getSequenceModelTrainer(
        trainParams, manifestInfoEntries);
    // TODO: This will probably cause issue, since the feature generator uses the outcomes array
    LemmaSampleSequenceStream seqStream = new LemmaSampleSequenceStream(samples, contextGenerator);
    seqModel = seqTrainer.train(seqStream);
  }
  else {
    throw new IllegalArgumentException("Trainer type is not supported: " + trainerType);
  }
  // Exactly one of the two model references is non-null at this point.
  if (maxentModel != null) {
    return new LemmatizerModel(languageCode, maxentModel, beamSize, manifestInfoEntries, posFactory);
  }
  return new LemmatizerModel(languageCode, seqModel, manifestInfoEntries, posFactory);
}
/**
 * Finds the k-best lemma-class sequences for the sentence, where k is the default beam size.
 *
 * @param sentence the tokens of the sentence
 * @param tags the POS tags, aligned with {@code sentence}
 * @return the top-ranked sequences
 */
public Sequence[] topKLemmaClasses(String[] sentence, String[] tags) {
  Object[] additionalContext = new Object[] { tags };
  return model.bestSequences(DEFAULT_BEAM_SIZE, sentence, additionalContext,
      contextGenerator, sequenceValidator);
}
/**
 * Finds the k-best lemma-class sequences whose score exceeds the given minimum.
 *
 * @param sentence the tokens of the sentence
 * @param tags the POS tags, aligned with {@code sentence}
 * @param minSequenceScore sequences scoring at or below this value are discarded
 * @return the top-ranked sequences above the score threshold
 */
public Sequence[] topKLemmaClasses(String[] sentence, String[] tags, double minSequenceScore) {
  Object[] additionalContext = new Object[] { tags };
  return model.bestSequences(DEFAULT_BEAM_SIZE, sentence, additionalContext,
      minSequenceScore, contextGenerator, sequenceValidator);
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/lemmatizer/LemmatizerModel.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.lemmatizer;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.file.Path;
import java.util.Map;
import java.util.Properties;
import opennlp.tools.ml.BeamSearch;
import opennlp.tools.ml.model.AbstractModel;
import opennlp.tools.ml.model.MaxentModel;
import opennlp.tools.ml.model.SequenceClassificationModel;
import opennlp.tools.util.BaseToolFactory;
import opennlp.tools.util.InvalidFormatException;
import opennlp.tools.util.model.BaseModel;
/**
 * The {@link LemmatizerModel} is the model used
 * by a learnable {@link Lemmatizer}.
 *
 * It stores the trained model under a dedicated artifact entry and, for maxent
 * models, records the beam size in the manifest.
 *
 * @see LemmatizerME
 */
public class LemmatizerModel extends BaseModel {

  private static final String COMPONENT_NAME = "StatisticalLemmatizer";
  private static final String LEMMATIZER_MODEL_ENTRY_NAME = "lemmatizer.model";

  /** Creates a model wrapping an already trained sequence classification model. */
  public LemmatizerModel(String languageCode, SequenceClassificationModel<String> lemmatizerModel,
      Map<String, String> manifestInfoEntries, LemmatizerFactory factory) {
    super(COMPONENT_NAME, languageCode, manifestInfoEntries, factory);
    artifactMap.put(LEMMATIZER_MODEL_ENTRY_NAME, lemmatizerModel);
    checkArtifactMap();
  }

  /** Creates a model wrapping a maxent model, using the default beam size. */
  public LemmatizerModel(String languageCode, MaxentModel lemmatizerModel,
      Map<String, String> manifestInfoEntries, LemmatizerFactory factory) {
    this(languageCode, lemmatizerModel, LemmatizerME.DEFAULT_BEAM_SIZE, manifestInfoEntries, factory);
  }

  /** Creates a model wrapping a maxent model; the beam size is recorded in the manifest. */
  public LemmatizerModel(String languageCode, MaxentModel lemmatizerModel, int beamSize,
      Map<String, String> manifestInfoEntries, LemmatizerFactory factory) {
    super(COMPONENT_NAME, languageCode, manifestInfoEntries, factory);
    artifactMap.put(LEMMATIZER_MODEL_ENTRY_NAME, lemmatizerModel);
    Properties manifest = (Properties) artifactMap.get(MANIFEST_ENTRY);
    manifest.put(BeamSearch.BEAM_SIZE_PARAMETER, Integer.toString(beamSize));
    checkArtifactMap();
  }

  public LemmatizerModel(String languageCode, MaxentModel lemmatizerModel, LemmatizerFactory factory) {
    this(languageCode, lemmatizerModel, null, factory);
  }

  /** Restores a serialized model from the given input stream. */
  public LemmatizerModel(InputStream in) throws IOException, InvalidFormatException {
    super(COMPONENT_NAME, in);
  }

  /** Restores a serialized model from the given file. */
  public LemmatizerModel(File modelFile) throws IOException, InvalidFormatException {
    super(COMPONENT_NAME, modelFile);
  }

  /** Restores a serialized model from the given path. */
  public LemmatizerModel(Path modelPath) throws IOException, InvalidFormatException {
    this(modelPath.toFile());
  }

  /** Restores a serialized model from the given URL. */
  public LemmatizerModel(URL modelURL) throws IOException, InvalidFormatException {
    super(COMPONENT_NAME, modelURL);
  }

  @Override
  protected void validateArtifactMap() throws InvalidFormatException {
    super.validateArtifactMap();
    if (!(artifactMap.get(LEMMATIZER_MODEL_ENTRY_NAME) instanceof AbstractModel)) {
      throw new InvalidFormatException("Lemmatizer model is incomplete!");
    }
  }

  /**
   * Returns the lemmatizer model as a {@link SequenceClassificationModel}.
   * A maxent model is wrapped in a {@link BeamSearch} (using the manifest's beam
   * size when present, otherwise the default); a sequence model is returned
   * directly; anything else yields {@code null}.
   */
  public SequenceClassificationModel<String> getLemmatizerSequenceModel() {
    // Look the artifact up once instead of querying the map repeatedly.
    Object lemmatizerModel = artifactMap.get(LEMMATIZER_MODEL_ENTRY_NAME);
    if (lemmatizerModel instanceof MaxentModel) {
      Properties manifest = (Properties) artifactMap.get(MANIFEST_ENTRY);
      String beamSizeString = manifest.getProperty(BeamSearch.BEAM_SIZE_PARAMETER);
      int beamSize = LemmatizerME.DEFAULT_BEAM_SIZE;
      if (beamSizeString != null) {
        beamSize = Integer.parseInt(beamSizeString);
      }
      return new BeamSearch<>(beamSize, (MaxentModel) lemmatizerModel);
    }
    else if (lemmatizerModel instanceof SequenceClassificationModel) {
      // The artifact is only ever stored as SequenceClassificationModel<String>
      // (see the constructor above), so this unchecked cast is safe; scoping the
      // suppression to a local avoids the raw-type cast of the original code.
      @SuppressWarnings("unchecked")
      SequenceClassificationModel<String> seqModel =
          (SequenceClassificationModel<String>) lemmatizerModel;
      return seqModel;
    }
    else {
      return null;
    }
  }

  @Override
  protected Class<? extends BaseToolFactory> getDefaultFactory() {
    return LemmatizerFactory.class;
  }

  public LemmatizerFactory getFactory() {
    return (LemmatizerFactory) this.toolFactory;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/lemmatizer/package-info.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Package related with the lemmatizer tool
*/
package opennlp.tools.lemmatizer;
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/AbstractEventModelSequenceTrainer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml;
import java.io.IOException;
import opennlp.tools.ml.model.MaxentModel;
import opennlp.tools.ml.model.SequenceStream;
/**
 * Base class for trainers that build a {@link MaxentModel} from a
 * {@link SequenceStream}. Subclasses implement {@link #doTrain(SequenceStream)}.
 */
public abstract class AbstractEventModelSequenceTrainer extends AbstractTrainer implements
    EventModelSequenceTrainer {

  public AbstractEventModelSequenceTrainer() {
  }

  /** Performs the actual training; invoked after parameter validation. */
  public abstract MaxentModel doTrain(SequenceStream events)
      throws IOException;

  /** Validates parameters, trains the model, and records the trainer type in the report. */
  public final MaxentModel train(SequenceStream events) throws IOException {
    validate();
    MaxentModel trainedModel = doTrain(events);
    addToReport(AbstractTrainer.TRAINER_TYPE_PARAM,
        EventModelSequenceTrainer.SEQUENCE_VALUE);
    return trainedModel;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/AbstractEventTrainer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml;
import java.io.IOException;
import opennlp.tools.ml.model.AbstractDataIndexer;
import opennlp.tools.ml.model.DataIndexer;
import opennlp.tools.ml.model.DataIndexerFactory;
import opennlp.tools.ml.model.Event;
import opennlp.tools.ml.model.HashSumEventStream;
import opennlp.tools.ml.model.MaxentModel;
import opennlp.tools.util.InsufficientTrainingDataException;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.TrainingParameters;
/**
 * Base class for trainers that build a {@link MaxentModel} from {@link Event}s.
 * Events are first indexed by a {@link DataIndexer} selected via the training
 * parameters.
 */
public abstract class AbstractEventTrainer extends AbstractTrainer implements EventTrainer {

  // Training-parameter key and values selecting the data indexer implementation.
  public static final String DATA_INDEXER_PARAM = "DataIndexer";
  public static final String DATA_INDEXER_ONE_PASS_VALUE = "OnePass";
  public static final String DATA_INDEXER_TWO_PASS_VALUE = "TwoPass";
  public static final String DATA_INDEXER_ONE_PASS_REAL_VALUE = "OnePassRealValue";

  public AbstractEventTrainer() {
  }

  public AbstractEventTrainer(TrainingParameters parameters) {
    super(parameters);
  }

  @Override
  public void validate() {
    super.validate();
  }

  @Deprecated
  @Override
  public boolean isValid() {
    return super.isValid();
  }

  /** @return true if the data indexer should sort and merge duplicate events. */
  public abstract boolean isSortAndMerge();

  /**
   * Creates the configured {@link DataIndexer} and indexes the given events.
   *
   * @param events the event stream to index
   * @return the populated data indexer
   * @throws IOException if reading the events fails
   */
  public DataIndexer getDataIndexer(ObjectStream<Event> events) throws IOException {
    trainingParameters.put(AbstractDataIndexer.SORT_PARAM, isSortAndMerge());
    // If the cutoff was set, don't overwrite the value.
    if (trainingParameters.getIntParameter(CUTOFF_PARAM, -1) == -1) {
      // Use the shared constant instead of a magic number so the fallback stays
      // consistent with AbstractTrainer.CUTOFF_DEFAULT.
      trainingParameters.put(CUTOFF_PARAM, CUTOFF_DEFAULT);
    }
    DataIndexer indexer = DataIndexerFactory.getDataIndexer(trainingParameters, reportMap);
    indexer.index(events);
    return indexer;
  }

  /** Performs the actual training on already indexed data. */
  public abstract MaxentModel doTrain(DataIndexer indexer) throws IOException;

  /**
   * Trains a model from pre-indexed data.
   *
   * @throws InsufficientTrainingDataException if fewer than two outcomes are present
   */
  public final MaxentModel train(DataIndexer indexer) throws IOException {
    validate();
    if (indexer.getOutcomeLabels().length <= 1) {
      throw new InsufficientTrainingDataException("Training data must contain more than one outcome");
    }
    MaxentModel model = doTrain(indexer);
    addToReport(AbstractTrainer.TRAINER_TYPE_PARAM, EventTrainer.EVENT_VALUE);
    return model;
  }

  /**
   * Trains a model from an event stream, recording a hash of the events in the
   * report so training runs can be compared for identical input.
   */
  public final MaxentModel train(ObjectStream<Event> events) throws IOException {
    validate();
    HashSumEventStream hses = new HashSumEventStream(events);
    DataIndexer indexer = getDataIndexer(hses);
    addToReport("Training-Eventhash", hses.calculateHashSum().toString(16));
    return train(indexer);
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/AbstractSequenceTrainer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml;
import java.io.IOException;
import opennlp.tools.ml.model.SequenceClassificationModel;
import opennlp.tools.ml.model.SequenceStream;
/**
 * Base class for trainers that build a {@link SequenceClassificationModel}
 * directly from a {@link SequenceStream}.
 */
public abstract class AbstractSequenceTrainer extends AbstractTrainer implements
    SequenceTrainer {

  public AbstractSequenceTrainer() {
  }

  /** Performs the actual training; invoked after parameter validation. */
  public abstract SequenceClassificationModel<String> doTrain(SequenceStream events)
      throws IOException;

  /** Validates parameters, trains the model, and records the trainer type in the report. */
  public final SequenceClassificationModel<String> train(SequenceStream events) throws IOException {
    validate();
    SequenceClassificationModel<String> trainedModel = doTrain(events);
    addToReport(AbstractTrainer.TRAINER_TYPE_PARAM, SequenceTrainer.SEQUENCE_VALUE);
    return trainedModel;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/AbstractTrainer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml;
import java.util.HashMap;
import java.util.Map;
import opennlp.tools.ml.maxent.GISTrainer;
import opennlp.tools.util.TrainingParameters;
/**
 * Base class for all trainers. Holds the shared {@link TrainingParameters},
 * the report map written into the model manifest, and common parameter accessors.
 */
public abstract class AbstractTrainer {
  // Well-known training-parameter keys and their default values.
  public static final String ALGORITHM_PARAM = "Algorithm";
  public static final String TRAINER_TYPE_PARAM = "TrainerType";
  public static final String CUTOFF_PARAM = "Cutoff";
  public static final int CUTOFF_DEFAULT = 5;
  public static final String ITERATIONS_PARAM = "Iterations";
  public static final int ITERATIONS_DEFAULT = 100;
  public static final String VERBOSE_PARAM = "PrintMessages";
  public static final boolean VERBOSE_DEFAULT = true;
  protected TrainingParameters trainingParameters;
  // Key/value pairs describing the training run; consumers may persist this map.
  protected Map<String,String> reportMap;
  // Controls whether display(...) writes progress messages to stdout.
  protected boolean printMessages;
  public AbstractTrainer() {
  }
  public AbstractTrainer(TrainingParameters parameters) {
    init(parameters,new HashMap<>());
  }
  /**
   * Initializes the trainer with its parameters and report map.
   * A null reportMap is replaced by an empty map.
   */
  public void init(TrainingParameters trainingParameters, Map<String,String> reportMap) {
    this.trainingParameters = trainingParameters;
    if (reportMap == null) reportMap = new HashMap<>();
    this.reportMap = reportMap;
    printMessages = trainingParameters.getBooleanParameter(VERBOSE_PARAM, VERBOSE_DEFAULT);
  }
  /** @deprecated Use {@link #init(TrainingParameters, Map)} instead. */
  @Deprecated
  public void init(Map<String, String> trainParams, Map<String, String> reportMap) {
    init(new TrainingParameters(trainParams),reportMap);
  }
  /** Returns the configured algorithm name, defaulting to GIS maxent. */
  public String getAlgorithm() {
    return trainingParameters.getStringParameter(ALGORITHM_PARAM, GISTrainer.MAXENT_VALUE);
  }
  /** Returns the feature cutoff, defaulting to {@link #CUTOFF_DEFAULT}. */
  public int getCutoff() {
    return trainingParameters.getIntParameter(CUTOFF_PARAM, CUTOFF_DEFAULT);
  }
  /** Returns the iteration count, defaulting to {@link #ITERATIONS_DEFAULT}. */
  public int getIterations() {
    return trainingParameters.getIntParameter(ITERATIONS_PARAM, ITERATIONS_DEFAULT);
  }
  /**
   * Check parameters. If subclass overrides this, it should call super.validate();
   *
   * Currently only checks that cutoff and iterations parse as integers.
   *
   * @throws java.lang.IllegalArgumentException if a numeric parameter is malformed
   */
  public void validate() {
    // TODO: Need to validate all parameters correctly ... error prone?!
    // should validate if algorithm is set? What about the Parser?
    try {
      trainingParameters.getIntParameter(CUTOFF_PARAM, CUTOFF_DEFAULT);
      trainingParameters.getIntParameter(ITERATIONS_PARAM, ITERATIONS_DEFAULT);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException(e);
    }
  }
  /**
   * @deprecated Use {@link #validate()} instead.
   * @return true if {@link #validate()} does not throw
   */
  @Deprecated
  public boolean isValid() {
    try {
      validate();
      return true;
    }
    catch (IllegalArgumentException e) {
      return false;
    }
  }
  /**
   * @deprecated Use the {@link TrainingParameters} accessors directly.
   * @param key the parameter key
   * @param defaultValue the value returned when the key is absent
   */
  @Deprecated
  protected String getStringParam(String key, String defaultValue) {
    return trainingParameters.getStringParameter(key, defaultValue);
  }
  /**
   * @deprecated Use the {@link TrainingParameters} accessors directly.
   * NOTE(review): this method's name collides with the TrainingParameters type
   * and reads like a constructor — it appears to be a mis-named getIntParam;
   * confirm before relying on it.
   * @param key the parameter key
   * @param defaultValue the value returned when the key is absent
   */
  @Deprecated
  protected int TrainingParameters(String key, int defaultValue) {
    return trainingParameters.getIntParameter(key, defaultValue);
  }
  /**
   * @deprecated Use the {@link TrainingParameters} accessors directly.
   * @param key the parameter key
   * @param defaultValue the value returned when the key is absent
   */
  @Deprecated
  protected double getDoubleParam(String key, double defaultValue) {
    return trainingParameters.getDoubleParameter(key, defaultValue);
  }
  /**
   * @deprecated Use the {@link TrainingParameters} accessors directly.
   * @param key the parameter key
   * @param defaultValue the value returned when the key is absent
   */
  @Deprecated
  protected boolean getBooleanParam(String key, boolean defaultValue) {
    return trainingParameters.getBooleanParameter(key, defaultValue);
  }
  /**
   * Adds the key/value pair to the report map.
   * @param key the report key
   * @param value the report value
   */
  protected void addToReport(String key, String value) {
    reportMap.put(key, value);
  }
  // Prints a progress message to stdout when verbose output is enabled.
  protected void display(String s) {
    if (printMessages) {
      System.out.print(s);
    }
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/BeamSearch.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml;
import java.util.Arrays;
import java.util.List;
import java.util.PriorityQueue;
import java.util.Queue;
import opennlp.tools.ml.model.MaxentModel;
import opennlp.tools.ml.model.SequenceClassificationModel;
import opennlp.tools.util.BeamSearchContextGenerator;
import opennlp.tools.util.Cache;
import opennlp.tools.util.Sequence;
import opennlp.tools.util.SequenceValidator;
/**
* Performs k-best search over sequence. This is based on the description in
* Ratnaparkhi (1998), PhD diss, Univ. of Pennsylvania.
*
* @see Sequence
* @see SequenceValidator
* @see BeamSearchContextGenerator
*/
public class BeamSearch<T> implements SequenceClassificationModel<T> {
  // Training-parameter key under which the beam size is configured.
  public static final String BEAM_SIZE_PARAMETER = "BeamSize";
  private static final Object[] EMPTY_ADDITIONAL_CONTEXT = new Object[0];
  // Beam width k: how many partial sequences are retained at each step.
  protected int size;
  protected MaxentModel model;
  // Scratch buffer reused for every model.eval call; sharing it makes a
  // BeamSearch instance unsafe for concurrent decoding.
  private double[] probs;
  // Optional cache from feature contexts to model scores (null when disabled).
  private Cache<String[], double[]> contextsCache;
  // Stand-in for log(0): a minimum score low enough that any real sequence passes.
  private static final int zeroLog = -100000;
  /**
   * Creates new search object.
   *
   * @param size The size of the beam (k).
   * @param model the model for assigning probabilities to the sequence outcomes.
   */
  public BeamSearch(int size, MaxentModel model) {
    this(size, model, 0);
  }
  /**
   * Creates new search object with an optional context-score cache.
   *
   * @param size The size of the beam (k).
   * @param model the model for assigning probabilities to the sequence outcomes.
   * @param cacheSize number of context arrays to cache; 0 disables caching.
   */
  public BeamSearch(int size, MaxentModel model, int cacheSize) {
    this.size = size;
    this.model = model;
    if (cacheSize > 0) {
      contextsCache = new Cache<>(cacheSize);
    }
    this.probs = new double[model.getNumOutcomes()];
  }
  /**
   * Returns the best sequences of outcomes based on the model for this object.
   *
   * @param numSequences the maximum number of sequences to return
   * @param sequence The input sequence.
   * @param additionalContext An Object[] of additional context.
   *     This is passed to the context generator blindly with the
   *     assumption that the context are appropriate.
   * @param minSequenceScore sequences scoring at or below this are discarded
   * @param cg the context generator producing features per position
   * @param validator filter deciding which outcomes may extend a sequence
   *
   * @return The top ranked sequences of outcomes (possibly empty)
   */
  public Sequence[] bestSequences(int numSequences, T[] sequence,
      Object[] additionalContext, double minSequenceScore,
      BeamSearchContextGenerator<T> cg, SequenceValidator<T> validator) {
    Queue<Sequence> prev = new PriorityQueue<>(size);
    Queue<Sequence> next = new PriorityQueue<>(size);
    Queue<Sequence> tmp;
    prev.add(new Sequence());
    if (additionalContext == null) {
      additionalContext = EMPTY_ADDITIONAL_CONTEXT;
    }
    // Advance the beam one input position at a time.
    for (int i = 0; i < sequence.length; i++) {
      int sz = Math.min(size, prev.size());
      // Expand each of the (at most `size`) best partial sequences.
      for (int sc = 0; prev.size() > 0 && sc < sz; sc++) {
        Sequence top = prev.remove();
        List<String> tmpOutcomes = top.getOutcomes();
        String[] outcomes = tmpOutcomes.toArray(new String[tmpOutcomes.size()]);
        String[] contexts = cg.getContext(i, sequence, outcomes, additionalContext);
        double[] scores;
        if (contextsCache != null) {
          scores = contextsCache.computeIfAbsent(contexts, c -> model.eval(c, probs));
        } else {
          scores = model.eval(contexts, probs);
        }
        // Determine the score of the k-th best outcome; only outcomes at or
        // above it are considered for extending this partial sequence.
        double[] temp_scores = new double[scores.length];
        System.arraycopy(scores, 0, temp_scores, 0, scores.length);
        Arrays.sort(temp_scores);
        double min = temp_scores[Math.max(0,scores.length - size)];
        for (int p = 0; p < scores.length; p++) {
          if (scores[p] >= min) {
            String out = model.getOutcome(p);
            if (validator.validSequence(i, sequence, outcomes, out)) {
              Sequence ns = new Sequence(top, out, scores[p]);
              if (ns.getScore() > minSequenceScore) {
                next.add(ns);
              }
            }
          }
        }
        if (next.size() == 0) { //if no advanced sequences, advance all valid
          // Fallback: ignore the top-k score threshold and accept every
          // validator-approved outcome so the beam never dies out here.
          for (int p = 0; p < scores.length; p++) {
            String out = model.getOutcome(p);
            if (validator.validSequence(i, sequence, outcomes, out)) {
              Sequence ns = new Sequence(top, out, scores[p]);
              if (ns.getScore() > minSequenceScore) {
                next.add(ns);
              }
            }
          }
        }
      }
      // make prev = next; and re-init next (we reuse existing prev set once we clear it)
      prev.clear();
      tmp = prev;
      prev = next;
      next = tmp;
    }
    // Drain the beam in priority order to produce the top-ranked sequences.
    int numSeq = Math.min(numSequences, prev.size());
    Sequence[] topSequences = new Sequence[numSeq];
    for (int seqIndex = 0; seqIndex < numSeq; seqIndex++) {
      topSequences[seqIndex] = prev.remove();
    }
    return topSequences;
  }
  /** Convenience overload using the effective log(0) minimum score. */
  public Sequence[] bestSequences(int numSequences, T[] sequence,
      Object[] additionalContext, BeamSearchContextGenerator<T> cg, SequenceValidator<T> validator) {
    return bestSequences(numSequences, sequence, additionalContext, zeroLog, cg, validator);
  }
  /**
   * Returns the single best sequence, or null if none could be found.
   */
  public Sequence bestSequence(T[] sequence, Object[] additionalContext,
      BeamSearchContextGenerator<T> cg, SequenceValidator<T> validator) {
    Sequence[] sequences = bestSequences(1, sequence, additionalContext, cg, validator);
    if (sequences.length > 0)
      return sequences[0];
    else
      return null;
  }
  @Override
  public String[] getOutcomes() {
    String[] outcomes = new String[model.getNumOutcomes()];
    for (int i = 0; i < model.getNumOutcomes(); i++) {
      outcomes[i] = model.getOutcome(i);
    }
    return outcomes;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/EventModelSequenceTrainer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml;
import java.io.IOException;
import java.util.Map;
import opennlp.tools.ml.model.MaxentModel;
import opennlp.tools.ml.model.SequenceStream;
import opennlp.tools.util.TrainingParameters;
/**
 * Trainer contract for building a {@link MaxentModel} from a stream of event sequences.
 */
public interface EventModelSequenceTrainer {
  // Value recorded under TrainerType in the training report.
  String SEQUENCE_VALUE = "EventModelSequence";
  /** @deprecated Use {@link #init(TrainingParameters, Map)} instead. */
  @Deprecated
  void init(Map<String, String> trainParams, Map<String, String> reportMap);
  /** Initializes the trainer with its parameters and report map. */
  void init(TrainingParameters trainParams, Map<String, String> reportMap);
  /** Trains a maxent model from the given sequence stream. */
  MaxentModel train(SequenceStream events) throws IOException;
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/EventTrainer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml;
import java.io.IOException;
import java.util.Map;
import opennlp.tools.ml.model.DataIndexer;
import opennlp.tools.ml.model.Event;
import opennlp.tools.ml.model.MaxentModel;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.TrainingParameters;
/**
 * Trainer contract for building a {@link MaxentModel} from individual {@link Event}s.
 */
public interface EventTrainer {
  // Value recorded under TrainerType in the training report.
  String EVENT_VALUE = "Event";
  /** @deprecated Use {@link #init(TrainingParameters, Map)} instead. */
  @Deprecated
  void init(Map<String, String> trainParams, Map<String, String> reportMap);
  /** Initializes the trainer with its parameters and report map. */
  void init(TrainingParameters trainingParams, Map<String, String> reportMap);
  /** Trains a maxent model directly from an event stream. */
  MaxentModel train(ObjectStream<Event> events) throws IOException;
  /** Trains a maxent model from already indexed events. */
  MaxentModel train(DataIndexer indexer) throws IOException;
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/SequenceTrainer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml;
import java.io.IOException;
import java.util.Map;
import opennlp.tools.ml.model.SequenceClassificationModel;
import opennlp.tools.ml.model.SequenceStream;
import opennlp.tools.util.TrainingParameters;
/**
 * Trainer contract for building a {@link SequenceClassificationModel}
 * directly from a {@link SequenceStream}.
 */
public interface SequenceTrainer {
  // Value recorded under TrainerType in the training report.
  String SEQUENCE_VALUE = "Sequence";
  /** @deprecated Use {@link #init(TrainingParameters, Map)} instead. */
  @Deprecated
  void init(Map<String, String> trainParams, Map<String, String> reportMap);
  /** Initializes the trainer with its parameters and report map. */
  void init(TrainingParameters trainParams, Map<String, String> reportMap);
  /** Trains a sequence classification model from the given sequence stream. */
  SequenceClassificationModel<String> train(SequenceStream events) throws IOException;
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/TrainerFactory.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml;
import java.lang.reflect.Constructor;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import opennlp.tools.ml.maxent.GISTrainer;
import opennlp.tools.ml.maxent.quasinewton.QNTrainer;
import opennlp.tools.ml.naivebayes.NaiveBayesTrainer;
import opennlp.tools.ml.perceptron.PerceptronTrainer;
import opennlp.tools.ml.perceptron.SimplePerceptronSequenceTrainer;
import opennlp.tools.util.TrainingParameters;
import opennlp.tools.util.ext.ExtensionLoader;
import opennlp.tools.util.ext.ExtensionNotLoadedException;
public class TrainerFactory {
/**
 * The supported kinds of trainers: plain event trainers, event-model
 * sequence trainers, and full sequence trainers.
 */
public enum TrainerType {
  EVENT_MODEL_TRAINER,
  EVENT_MODEL_SEQUENCE_TRAINER,
  SEQUENCE_TRAINER
}
// Built-in trainers, keyed by the algorithm name given in the training parameters.
// Uses a wildcard Class<?> value type instead of the raw Class type so lookups
// (e.g. BUILTIN_TRAINERS.get(...)) stay type-safe without unchecked warnings.
private static final Map<String, Class<?>> BUILTIN_TRAINERS;
static {
  Map<String, Class<?>> _trainers = new HashMap<>();
  _trainers.put(GISTrainer.MAXENT_VALUE, GISTrainer.class);
  _trainers.put(QNTrainer.MAXENT_QN_VALUE, QNTrainer.class);
  _trainers.put(PerceptronTrainer.PERCEPTRON_VALUE, PerceptronTrainer.class);
  _trainers.put(SimplePerceptronSequenceTrainer.PERCEPTRON_SEQUENCE_VALUE,
      SimplePerceptronSequenceTrainer.class);
  _trainers.put(NaiveBayesTrainer.NAIVE_BAYES_VALUE, NaiveBayesTrainer.class);
  BUILTIN_TRAINERS = Collections.unmodifiableMap(_trainers);
}
/**
* Determines the trainer type based on the ALGORITHM_PARAM value.
*
* @param trainParams - Map of training parameters
* @return the trainer type or null if type couldn't be determined.
*/
public static TrainerType getTrainerType(TrainingParameters trainParams) {
String algorithmValue = trainParams.getStringParameter(AbstractTrainer.ALGORITHM_PARAM,null);
// Check if it is defaulting to the MAXENT trainer
if (algorithmValue == null) {
return TrainerType.EVENT_MODEL_TRAINER;
}
Class<?> trainerClass = BUILTIN_TRAINERS.get(algorithmValue);
if (trainerClass != null) {
if (EventTrainer.class.isAssignableFrom(trainerClass)) {
return TrainerType.EVENT_MODEL_TRAINER;
}
else if (EventModelSequenceTrainer.class.isAssignableFrom(trainerClass)) {
return TrainerType.EVENT_MODEL_SEQUENCE_TRAINER;
}
else if (SequenceTrainer.class.isAssignableFrom(trainerClass)) {
return TrainerType.SEQUENCE_TRAINER;
}
}
// Try to load the different trainers, and return the type on success
try {
ExtensionLoader.instantiateExtension(EventTrainer.class, algorithmValue);
return TrainerType.EVENT_MODEL_TRAINER;
}
catch (ExtensionNotLoadedException ignored) {
// this is ignored
}
try {
ExtensionLoader.instantiateExtension(EventModelSequenceTrainer.class, algorithmValue);
return TrainerType.EVENT_MODEL_SEQUENCE_TRAINER;
}
catch (ExtensionNotLoadedException ignored) {
// this is ignored
}
try {
ExtensionLoader.instantiateExtension(SequenceTrainer.class, algorithmValue);
return TrainerType.SEQUENCE_TRAINER;
}
catch (ExtensionNotLoadedException ignored) {
// this is ignored
}
return null;
}
public static SequenceTrainer getSequenceModelTrainer(TrainingParameters trainParams,
Map<String, String> reportMap) {
String trainerType = trainParams.getStringParameter(AbstractTrainer.ALGORITHM_PARAM,null);
if (trainerType != null) {
if (BUILTIN_TRAINERS.containsKey(trainerType)) {
SequenceTrainer trainer = TrainerFactory.<SequenceTrainer>createBuiltinTrainer(
BUILTIN_TRAINERS.get(trainerType));
trainer.init(trainParams, reportMap);
return trainer;
} else {
SequenceTrainer trainer =
ExtensionLoader.instantiateExtension(SequenceTrainer.class, trainerType);
trainer.init(trainParams, reportMap);
return trainer;
}
}
else {
throw new IllegalArgumentException("Trainer type couldn't be determined!");
}
}
public static EventModelSequenceTrainer getEventModelSequenceTrainer(TrainingParameters trainParams,
Map<String, String> reportMap) {
String trainerType = trainParams.getStringParameter(AbstractTrainer.ALGORITHM_PARAM,null);
if (trainerType != null) {
if (BUILTIN_TRAINERS.containsKey(trainerType)) {
EventModelSequenceTrainer trainer = TrainerFactory.<EventModelSequenceTrainer>createBuiltinTrainer(
BUILTIN_TRAINERS.get(trainerType));
trainer.init(trainParams, reportMap);
return trainer;
} else {
EventModelSequenceTrainer trainer =
ExtensionLoader.instantiateExtension(EventModelSequenceTrainer.class, trainerType);
trainer.init(trainParams, reportMap);
return trainer;
}
}
else {
throw new IllegalArgumentException("Trainer type couldn't be determined!");
}
}
public static EventTrainer getEventTrainer(TrainingParameters trainParams,
Map<String, String> reportMap) {
// if the trainerType is not defined -- use the GISTrainer.
String trainerType =
trainParams.getStringParameter(AbstractTrainer.ALGORITHM_PARAM, GISTrainer.MAXENT_VALUE);
if (BUILTIN_TRAINERS.containsKey(trainerType)) {
EventTrainer trainer = TrainerFactory.<EventTrainer>createBuiltinTrainer(
BUILTIN_TRAINERS.get(trainerType));
trainer.init(trainParams, reportMap);
return trainer;
} else {
EventTrainer trainer = ExtensionLoader.instantiateExtension(EventTrainer.class, trainerType);
trainer.init(trainParams, reportMap);
return trainer;
}
}
public static boolean isValid(TrainingParameters trainParams) {
// TODO: Need to validate all parameters correctly ... error prone?!
String algorithmName = trainParams.getStringParameter(AbstractTrainer.ALGORITHM_PARAM,null);
// If a trainer type can be determined, then the trainer is valid!
if (algorithmName != null &&
!(BUILTIN_TRAINERS.containsKey(algorithmName) || getTrainerType(trainParams) != null)) {
return false;
}
try {
// require that the Cutoff and the number of iterations be an integer.
// if they are not set, the default values will be ok.
trainParams.getIntParameter(AbstractTrainer.CUTOFF_PARAM, 0);
trainParams.getIntParameter(AbstractTrainer.ITERATIONS_PARAM, 0);
}
catch (NumberFormatException e) {
return false;
}
// no reason to require that the dataIndexer be a 1-pass or 2-pass dataindexer.
trainParams.getStringParameter(AbstractEventTrainer.DATA_INDEXER_PARAM, null);
// TODO: Check data indexing ...
return true;
}
private static <T> T createBuiltinTrainer(Class<T> trainerClass) {
T theTrainer = null;
if (trainerClass != null) {
try {
Constructor<T> contructor = trainerClass.getConstructor();
theTrainer = contructor.newInstance();
} catch (Exception e) {
String msg = "Could not instantiate the "
+ trainerClass.getCanonicalName()
+ ". The initialization throw an exception.";
System.err.println(msg);
e.printStackTrace();
throw new IllegalArgumentException(msg, e);
}
}
return theTrainer;
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent/BasicContextGenerator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.maxent;
/**
 * Generates contexts for maxent decisions, assuming that the input
 * given to the getContext() method is a String containing contextual
 * predicates separated by spaces.
 * e.g:
 * <p>
 * cp_1 cp_2 ... cp_n
 * </p>
 */
public class BasicContextGenerator implements ContextGenerator<String> {

  /**
   * The separator between contextual predicates. Note it is passed to
   * {@link String#split(String)} and is therefore interpreted as a
   * regular expression.
   */
  private final String separator;

  /**
   * Creates a generator which splits input on a single space.
   */
  public BasicContextGenerator() {
    this(" ");
  }

  /**
   * Creates a generator which splits input on the given separator.
   *
   * @param sep the separator regular expression
   */
  public BasicContextGenerator(String sep) {
    separator = sep;
  }

  /**
   * Builds up the list of contextual predicates given a String.
   */
  public String[] getContext(String o) {
    // The parameter is already typed as String; no cast is needed.
    return o.split(separator);
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent/ContextGenerator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.maxent;
/**
 * Generates contexts for maxent decisions.
 *
 * @param <T> the type of object contextual predicates are derived from
 */
@FunctionalInterface
public interface ContextGenerator<T> {
  /**
   * Builds up the list of contextual predicates given an object.
   *
   * @param o the object to derive contextual predicates from
   * @return the contextual predicates
   */
  String[] getContext(T o);
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent/DataStream.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.maxent;
/**
 * An interface for objects which can deliver a stream of training data to be
 * supplied to an EventStream. It is not necessary to use a DataStream in a
 * Maxent application, but it can be used to support a wider variety of formats
 * in which your training data can be held.
 */
public interface DataStream {
  /**
   * Returns the next slice of data held in this DataStream.
   *
   * @return the Object representing the data which is next in this DataStream
   */
  Object nextToken();
  /**
   * Tests whether there are any data tokens remaining in this DataStream.
   *
   * @return true if this DataStream has more data tokens
   */
  boolean hasNext();
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent/GISModel.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.maxent;
import opennlp.tools.ml.model.AbstractModel;
import opennlp.tools.ml.model.Context;
import opennlp.tools.ml.model.EvalParameters;
import opennlp.tools.ml.model.Prior;
import opennlp.tools.ml.model.UniformPrior;
/**
 * A maximum entropy model which has been trained using the Generalized
 * Iterative Scaling procedure (implemented in GIS.java).
 */
public final class GISModel extends AbstractModel {
  /**
   * Creates a new model with the specified parameters, outcome names, and
   * predicate/feature labels, using a uniform prior distribution.
   *
   * @param params
   *          The parameters of the model.
   * @param predLabels
   *          The names of the predicates used in this model.
   * @param outcomeNames
   *          The names of the outcomes this model predicts.
   */
  public GISModel(Context[] params, String[] predLabels, String[] outcomeNames) {
    this(params, predLabels, outcomeNames, new UniformPrior());
  }
  /**
   * Creates a new model with the specified parameters, outcome names, and
   * predicate/feature labels.
   *
   * @param params
   *          The parameters of the model.
   * @param predLabels
   *          The names of the predicates used in this model.
   * @param outcomeNames
   *          The names of the outcomes this model predicts.
   * @param prior
   *          The prior to be used with this model.
   */
  public GISModel(Context[] params, String[] predLabels, String[] outcomeNames, Prior prior) {
    super(params, predLabels, outcomeNames);
    this.prior = prior;
    prior.setLabels(outcomeNames, predLabels);
    modelType = ModelType.Maxent;
  }
  /**
   * Use this model to evaluate a context and return an array of the likelihood
   * of each outcome given that context.
   *
   * @param context
   *          The names of the predicates which have been observed at the
   *          present decision point.
   * @return The normalized probabilities for the outcomes given the context.
   *         The indexes of the double[] are the outcome ids, and the actual
   *         string representation of the outcomes can be obtained from the
   *         method getOutcome(int i).
   */
  public final double[] eval(String[] context) {
    return (eval(context, new double[evalParams.getNumOutcomes()]));
  }
  /**
   * As {@link #eval(String[])}, but with an explicit value for each
   * contextual predicate.
   *
   * @param context
   *          The names of the predicates which have been observed at the
   *          present decision point.
   * @param values
   *          The values for each of the contextual predicates.
   * @return The normalized probabilities for the outcomes given the context.
   */
  public final double[] eval(String[] context, float[] values) {
    return (eval(context, values, new double[evalParams.getNumOutcomes()]));
  }
  /**
   * As {@link #eval(String[])}, but storing the distribution in the caller
   * supplied array (all predicate values are assumed to be 1).
   *
   * @param context
   *          The names of the predicates which have been observed at the
   *          present decision point.
   * @param outsums
   *          This is where the distribution is stored.
   * @return The normalized probabilities for the outcomes given the context.
   */
  public final double[] eval(String[] context, double[] outsums) {
    return eval(context, null, outsums);
  }
  /**
   * Use this model to evaluate a context and return an array of the likelihood
   * of each outcome given that context.
   *
   * @param context
   *          The names of the predicates which have been observed at the
   *          present decision point.
   * @param values
   *          The values for each of the contextual predicates, or null if all
   *          values are 1.
   * @param outsums
   *          This is where the distribution is stored.
   * @return The normalized probabilities for the outcomes given the context.
   *         The indexes of the double[] are the outcome ids, and the actual
   *         string representation of the outcomes can be obtained from the
   *         method getOutcome(int i).
   */
  public final double[] eval(String[] context, float[] values, double[] outsums) {
    // Look up the parameters for each observed predicate; unseen predicates
    // map to null and are skipped during evaluation.
    Context[] scontexts = new Context[context.length];
    for (int i = 0; i < context.length; i++) {
      scontexts[i] = pmap.get(context[i]);
    }
    // Seed outsums with the (log) prior before accumulating parameters.
    prior.logPrior(outsums, scontexts, values);
    return GISModel.eval(scontexts, values, outsums, evalParams);
  }
  /**
   * Use this model to evaluate a context and return an array of the likelihood
   * of each outcome given the specified context and the specified parameters.
   *
   * @param context
   *          The integer values of the predicates which have been observed at
   *          the present decision point.
   * @param prior
   *          The prior distribution for the specified context.
   * @param model
   *          The set of parameters used in this computation.
   * @return The normalized probabilities for the outcomes given the context.
   *         The indexes of the double[] are the outcome ids, and the actual
   *         string representation of the outcomes can be obtained from the
   *         method getOutcome(int i).
   */
  public static double[] eval(int[] context, double[] prior,
      EvalParameters model) {
    return eval(context, null, prior, model);
  }
  /**
   * Use this model to evaluate a context and return an array of the likelihood
   * of each outcome given the specified context and the specified parameters.
   *
   * @param context
   *          The integer values of the predicates which have been observed at
   *          the present decision point.
   * @param values
   *          The values for each of the parameters, or null if all values are 1.
   * @param prior
   *          The prior distribution for the specified context.
   * @param model
   *          The set of parameters used in this computation.
   * @return The normalized probabilities for the outcomes given the context.
   *         The indexes of the double[] are the outcome ids, and the actual
   *         string representation of the outcomes can be obtained from the
   *         method getOutcome(int i).
   */
  static double[] eval(int[] context, float[] values, double[] prior,
      EvalParameters model) {
    // Resolve predicate ids to their parameter contexts and delegate.
    Context[] scontexts = new Context[context.length];
    for (int i = 0; i < context.length; i++) {
      scontexts[i] = model.getParams()[context[i]];
    }
    return GISModel.eval(scontexts, values, prior, model);
  }
  /**
   * Use this model to evaluate a context and return an array of the likelihood
   * of each outcome given the specified context and the specified parameters.
   *
   * @param context
   *          The parameter contexts of the predicates which have been observed
   *          at the present decision point (null entries are skipped).
   * @param values
   *          The values for each of the parameters, or null if all values are 1.
   * @param prior
   *          The prior distribution for the specified context; overwritten in
   *          place with the resulting distribution.
   * @param model
   *          The set of parameters used in this computation.
   * @return The normalized probabilities for the outcomes given the context.
   *         The indexes of the double[] are the outcome ids, and the actual
   *         string representation of the outcomes can be obtained from the
   *         method getOutcome(int i).
   */
  static double[] eval(Context[] context, float[] values, double[] prior,
      EvalParameters model) {
    int[] numfeats = new int[model.getNumOutcomes()];
    int[] activeOutcomes;
    double[] activeParameters;
    double value = 1;
    // Accumulate, per outcome, the sum of the parameters of each active
    // predicate (optionally weighted by its value).
    for (int ci = 0; ci < context.length; ci++) {
      if (context[ci] != null) {
        Context predParams = context[ci];
        activeOutcomes = predParams.getOutcomes();
        activeParameters = predParams.getParameters();
        if (values != null) {
          value = values[ci];
        }
        for (int ai = 0; ai < activeOutcomes.length; ai++) {
          int oid = activeOutcomes[ai];
          numfeats[oid]++;
          prior[oid] += activeParameters[ai] * value;
        }
      }
    }
    // Exponentiate and normalize the accumulated scores so they sum to 1.
    double normal = 0.0;
    for (int oid = 0; oid < model.getNumOutcomes(); oid++) {
      prior[oid] = Math.exp(prior[oid]);
      normal += prior[oid];
    }
    for (int oid = 0; oid < model.getNumOutcomes(); oid++) {
      prior[oid] /= normal;
    }
    return prior;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent/GISTrainer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.maxent;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import opennlp.tools.ml.AbstractEventTrainer;
import opennlp.tools.ml.model.AbstractModel;
import opennlp.tools.ml.model.DataIndexer;
import opennlp.tools.ml.model.EvalParameters;
import opennlp.tools.ml.model.Event;
import opennlp.tools.ml.model.MaxentModel;
import opennlp.tools.ml.model.MutableContext;
import opennlp.tools.ml.model.OnePassDataIndexer;
import opennlp.tools.ml.model.Prior;
import opennlp.tools.ml.model.UniformPrior;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.TrainingParameters;
/**
* An implementation of Generalized Iterative Scaling. The reference paper
* for this implementation was Adwait Ratnaparkhi's tech report at the
* University of Pennsylvania's Institute for Research in Cognitive Science,
* and is available at <a href ="ftp://ftp.cis.upenn.edu/pub/ircs/tr/97-08.ps.Z"><code>ftp://ftp.cis.upenn.edu/pub/ircs/tr/97-08.ps.Z</code></a>.
* <p>
* The slack parameter used in the above implementation has been removed by default
* from the computation and a method for updating with Gaussian smoothing has been
* added per Investigating GIS and Smoothing for Maximum Entropy Taggers, Clark and Curran (2002).
* <a href="http://acl.ldc.upenn.edu/E/E03/E03-1071.pdf"><code>http://acl.ldc.upenn.edu/E/E03/E03-1071.pdf</code></a>
* The slack parameter can be used by setting <code>useSlackParameter</code> to true.
* Gaussian smoothing can be used by setting <code>useGaussianSmoothing</code> to true.
* <p>
* A prior can be used to train models which converge to the distribution which minimizes the
* relative entropy between the distribution specified by the empirical constraints of the training
* data and the specified prior. By default, the uniform distribution is used as the prior.
*/
public class GISTrainer extends AbstractEventTrainer {
@Deprecated
public static final String OLD_LL_THRESHOLD_PARAM = "llthreshold";
public static final String LOG_LIKELIHOOD_THRESHOLD_PARAM = "LLThreshold";
public static final double LOG_LIKELIHOOD_THRESHOLD_DEFAULT = 0.0001;
// Training stops when the log-likelihood change drops below this threshold.
private double llThreshold = 0.0001;
/**
 * Specifies whether unseen context/outcome pairs should be estimated as
 * occurring very infrequently.
 */
private boolean useSimpleSmoothing = false;
/**
 * Specifies whether parameter updates should prefer a distribution of
 * parameters which is gaussian.
 */
private boolean useGaussianSmoothing = false;
// Sigma of the gaussian prior used when gaussian smoothing is enabled.
private double sigma = 2.0;
// If we are using smoothing, this is used as the "number" of
// times we want the trainer to imagine that it saw a feature that it
// actually didn't see. Defaulted to 0.1.
private double _smoothingObservation = 0.1;
/**
 * Number of unique events which occurred in the event set.
 */
private int numUniqueEvents;
/**
 * Number of predicates.
 */
private int numPreds;
/**
 * Number of outcomes.
 */
private int numOutcomes;
/**
 * Records the array of predicates seen in each event.
 */
private int[][] contexts;
/**
 * The value associated with each context. If null then context values are
 * assumed to be 1.
 */
private float[][] values;
/**
 * List of outcomes for each event i, in context[i].
 */
private int[] outcomeList;
/**
 * Records the num of times an event has been seen for each event i, in context[i].
 */
private int[] numTimesEventsSeen;
/**
 * Stores the String names of the outcomes. The GIS only tracks outcomes as
 * ints, and so this array is needed to save the model to disk and thereby
 * allow users to know what the outcome was in human understandable terms.
 */
private String[] outcomeLabels;
/**
 * Stores the String names of the predicates. The GIS only tracks predicates
 * as ints, and so this array is needed to save the model to disk and thereby
 * allow users to know what the outcome was in human understandable terms.
 */
private String[] predLabels;
/**
 * Stores the observed expected values of the features based on training data.
 */
private MutableContext[] observedExpects;
/**
 * Stores the estimated parameter value of each predicate during iteration.
 */
private MutableContext[] params;
/**
 * Stores the expected values of the features based on the current models.
 * One array per worker thread.
 */
private MutableContext[][] modelExpects;
/**
 * This is the prior distribution that the model uses for training.
 */
private Prior prior;
/**
 * Initial probability for all outcomes.
 */
private EvalParameters evalParams;
// Algorithm name under which this trainer is registered.
public static final String MAXENT_VALUE = "MAXENT";
/**
 * If we are using smoothing, this is used as the "number" of times we want
 * the trainer to imagine that it saw a feature that it actually didn't see.
 * Defaulted to 0.1.
 */
private static final String SMOOTHING_PARAM = "Smoothing";
private static final boolean SMOOTHING_DEFAULT = false;
private static final String SMOOTHING_OBSERVATION_PARAM = "SmoothingObservation";
private static final double SMOOTHING_OBSERVATION = 0.1;
private static final String GAUSSIAN_SMOOTHING_PARAM = "GaussianSmoothing";
private static final boolean GAUSSIAN_SMOOTHING_DEFAULT = false;
private static final String GAUSSIAN_SMOOTHING_SIGMA_PARAM = "GaussianSmoothingSigma";
private static final double GAUSSIAN_SMOOTHING_SIGMA_DEFAULT = 2.0;
/**
 * Creates a new <code>GISTrainer</code> instance which does not print
 * progress messages about training to STDOUT.
 */
public GISTrainer() {
  printMessages = false;
}
@Override
public boolean isSortAndMerge() {
  // Request that the data indexer sort and merge duplicate events for GIS.
  return true;
}
/**
 * Reads the GIS-specific training parameters: log-likelihood threshold,
 * simple smoothing, and gaussian smoothing (with its sigma).
 *
 * @param trainingParameters the training parameters to configure from
 * @param reportMap a map where training report entries are recorded
 * @throws RuntimeException if both simple and gaussian smoothing are enabled
 */
@Override
public void init(TrainingParameters trainingParameters, Map<String, String> reportMap) {
  super.init(trainingParameters, reportMap);
  // Just in case someone is using "llthreshold" instead of LLThreshold...
  // this warning can be removed in a future version of OpenNLP.
  if (trainingParameters.getDoubleParameter(OLD_LL_THRESHOLD_PARAM, -1.) > 0. ) {
    // Fixed: the warning previously printed the replacement's default VALUE
    // (LOG_LIKELIHOOD_THRESHOLD_DEFAULT) instead of the replacement
    // parameter NAME the user should switch to.
    display("WARNING: the training parameter: " + OLD_LL_THRESHOLD_PARAM +
        " has been deprecated. Please use " +
        LOG_LIKELIHOOD_THRESHOLD_PARAM + " instead");
    // If they didn't supply a value for both llthreshold AND LLThreshold, copy it over.
    if (trainingParameters.getDoubleParameter(LOG_LIKELIHOOD_THRESHOLD_PARAM, -1.) < 0. ) {
      trainingParameters.put(LOG_LIKELIHOOD_THRESHOLD_PARAM,
          trainingParameters.getDoubleParameter(OLD_LL_THRESHOLD_PARAM, LOG_LIKELIHOOD_THRESHOLD_DEFAULT));
    }
  }
  llThreshold = trainingParameters.getDoubleParameter(LOG_LIKELIHOOD_THRESHOLD_PARAM,
      LOG_LIKELIHOOD_THRESHOLD_DEFAULT);
  useSimpleSmoothing = trainingParameters.getBooleanParameter(SMOOTHING_PARAM, SMOOTHING_DEFAULT);
  if (useSimpleSmoothing) {
    _smoothingObservation =
        trainingParameters.getDoubleParameter(SMOOTHING_OBSERVATION_PARAM, SMOOTHING_OBSERVATION);
  }
  useGaussianSmoothing =
      trainingParameters.getBooleanParameter(GAUSSIAN_SMOOTHING_PARAM, GAUSSIAN_SMOOTHING_DEFAULT);
  if (useGaussianSmoothing) {
    sigma = trainingParameters.getDoubleParameter(
        GAUSSIAN_SMOOTHING_SIGMA_PARAM, GAUSSIAN_SMOOTHING_SIGMA_DEFAULT);
  }
  // The two smoothing strategies are mutually exclusive.
  if (useSimpleSmoothing && useGaussianSmoothing)
    throw new RuntimeException("Cannot set both Gaussian smoothing and Simple smoothing");
}
/**
 * Trains a maxent model on the already-indexed data using the configured
 * iteration count and thread count (default: single-threaded).
 *
 * @param indexer the data indexer holding the compressed training events
 * @return the trained maxent model
 */
@Override
public MaxentModel doTrain(DataIndexer indexer) throws IOException {
  int iterationCount = getIterations();
  int threadCount = trainingParameters.getIntParameter(TrainingParameters.THREADS_PARAM, 1);
  return trainModel(iterationCount, indexer, threadCount);
}
/**
 * Creates a new <code>GISTrainer</code> instance.
 *
 * @param printMessages sends progress messages about training to
 *                      STDOUT when true; trains silently otherwise.
 */
GISTrainer(boolean printMessages) {
  this.printMessages = printMessages;
}
/**
 * Sets whether this trainer will use smoothing while training the model.
 * This can improve model accuracy, though training will potentially take
 * longer and use more memory. Model size will also be larger.
 *
 * @param smooth true if smoothing is desired, false if not
 */
public void setSmoothing(boolean smooth) {
  useSimpleSmoothing = smooth;
}
/**
 * Sets the "number" of times we want the trainer to imagine that it saw a
 * feature that it actually didn't see. Only used when simple smoothing is
 * enabled (see {@link #setSmoothing(boolean)}); defaults to 0.1.
 *
 * @param timesSeen the "number" of times we want the trainer to imagine
 *                  it saw a feature that it actually didn't see
 */
public void setSmoothingObservation(double timesSeen) {
  _smoothingObservation = timesSeen;
}
/**
 * Enables gaussian smoothing and sets the sigma of the gaussian used for
 * parameter updates. Note this also turns gaussian smoothing on as a side
 * effect.
 *
 * @param sigmaValue the sigma of the gaussian
 */
public void setGaussianSigma(double sigmaValue) {
  useGaussianSmoothing = true;
  sigma = sigmaValue;
}
/**
 * Train a model using the GIS algorithm, assuming 100 iterations and no
 * cutoff.
 *
 * @param eventStream
 *          The EventStream holding the data on which this model will be
 *          trained.
 * @return The newly trained model, which can be used immediately or saved to
 *         disk using an opennlp.tools.ml.maxent.io.GISModelWriter object.
 * @throws IOException if reading from the event stream fails
 */
public GISModel trainModel(ObjectStream<Event> eventStream) throws IOException {
  return trainModel(eventStream, 100, 0);
}
/**
 * Trains a GIS model on the events in the specified event stream, using the
 * specified number of iterations and the specified count cutoff.
 *
 * @param eventStream a stream of all events
 * @param iterations the number of iterations to use for GIS
 * @param cutoff the number of times a feature must occur to be included
 * @return a GIS model trained with the specified settings
 * @throws IOException if reading from the event stream fails
 */
public GISModel trainModel(ObjectStream<Event> eventStream, int iterations,
    int cutoff) throws IOException {
  // Index the events first: the indexer compresses duplicate events in
  // memory and applies the feature-count cutoff.
  TrainingParameters indexingParameters = new TrainingParameters();
  indexingParameters.put(GISTrainer.ITERATIONS_PARAM, iterations);
  indexingParameters.put(GISTrainer.CUTOFF_PARAM, cutoff);
  DataIndexer dataIndexer = new OnePassDataIndexer();
  dataIndexer.init(indexingParameters, new HashMap<>());
  dataIndexer.index(eventStream);
  return trainModel(iterations, dataIndexer);
}
/**
 * Train a model using the GIS algorithm, single-threaded with a uniform prior.
 *
 * @param iterations The number of GIS iterations to perform.
 * @param di The data indexer used to compress events in memory.
 * @return The newly trained model, which can be used immediately or saved
 *         to disk using an opennlp.tools.ml.maxent.io.GISModelWriter object.
 */
public GISModel trainModel(int iterations, DataIndexer di) {
  return trainModel(iterations, di, new UniformPrior(), 1);
}
/**
 * Train a model using the GIS algorithm with a uniform prior.
 *
 * @param iterations The number of GIS iterations to perform.
 * @param di The data indexer used to compress events in memory.
 * @param threads The number of computation threads to use; must be at least one.
 * @return The newly trained model, which can be used immediately or saved
 *         to disk using an opennlp.tools.ml.maxent.io.GISModelWriter object.
 */
public GISModel trainModel(int iterations, DataIndexer di, int threads) {
  return trainModel(iterations, di, new UniformPrior(), threads);
}
/**
* Train a model using the GIS algorithm.
*
* @param iterations The number of GIS iterations to perform.
* @param di The data indexer used to compress events in memory.
* @param modelPrior The prior distribution used to train this model.
* @return The newly trained model, which can be used immediately or saved
* to disk using an opennlp.tools.ml.maxent.io.GISModelWriter object.
*/
public GISModel trainModel(int iterations, DataIndexer di, Prior modelPrior, int threads) {
if (threads <= 0) {
throw new IllegalArgumentException("threads must be at least one or greater but is " + threads + "!");
}
modelExpects = new MutableContext[threads][];
/* Incorporate all of the needed info *****/
display("Incorporating indexed data for training... \n");
contexts = di.getContexts();
values = di.getValues();
/*
The number of times a predicate occured in the training data.
*/
int[] predicateCounts = di.getPredCounts();
numTimesEventsSeen = di.getNumTimesEventsSeen();
numUniqueEvents = contexts.length;
this.prior = modelPrior;
//printTable(contexts);
// determine the correction constant and its inverse
double correctionConstant = 0;
for (int ci = 0; ci < contexts.length; ci++) {
if (values == null || values[ci] == null) {
if (contexts[ci].length > correctionConstant) {
correctionConstant = contexts[ci].length;
}
} else {
float cl = values[ci][0];
for (int vi = 1; vi < values[ci].length; vi++) {
cl += values[ci][vi];
}
if (cl > correctionConstant) {
correctionConstant = cl;
}
}
}
display("done.\n");
outcomeLabels = di.getOutcomeLabels();
outcomeList = di.getOutcomeList();
numOutcomes = outcomeLabels.length;
predLabels = di.getPredLabels();
prior.setLabels(outcomeLabels, predLabels);
numPreds = predLabels.length;
display("\tNumber of Event Tokens: " + numUniqueEvents + "\n");
display("\t Number of Outcomes: " + numOutcomes + "\n");
display("\t Number of Predicates: " + numPreds + "\n");
// set up feature arrays
float[][] predCount = new float[numPreds][numOutcomes];
for (int ti = 0; ti < numUniqueEvents; ti++) {
for (int j = 0; j < contexts[ti].length; j++) {
if (values != null && values[ti] != null) {
predCount[contexts[ti][j]][outcomeList[ti]] += numTimesEventsSeen[ti] * values[ti][j];
} else {
predCount[contexts[ti][j]][outcomeList[ti]] += numTimesEventsSeen[ti];
}
}
}
// A fake "observation" to cover features which are not detected in
// the data. The default is to assume that we observed "1/10th" of a
// feature during training.
final double smoothingObservation = _smoothingObservation;
// Get the observed expectations of the features. Strictly speaking,
// we should divide the counts by the number of Tokens, but because of
// the way the model's expectations are approximated in the
// implementation, this is cancelled out when we compute the next
// iteration of a parameter, making the extra divisions wasteful.
params = new MutableContext[numPreds];
for (int i = 0; i < modelExpects.length; i++) {
modelExpects[i] = new MutableContext[numPreds];
}
observedExpects = new MutableContext[numPreds];
// The model does need the correction constant and the correction feature. The correction constant
// is only needed during training, and the correction feature is not necessary.
// For compatibility reasons the model contains form now on a correction constant of 1,
// and a correction param 0.
evalParams = new EvalParameters(params, numOutcomes);
int[] activeOutcomes = new int[numOutcomes];
int[] outcomePattern;
int[] allOutcomesPattern = new int[numOutcomes];
for (int oi = 0; oi < numOutcomes; oi++) {
allOutcomesPattern[oi] = oi;
}
int numActiveOutcomes;
for (int pi = 0; pi < numPreds; pi++) {
numActiveOutcomes = 0;
if (useSimpleSmoothing) {
numActiveOutcomes = numOutcomes;
outcomePattern = allOutcomesPattern;
} else { //determine active outcomes
for (int oi = 0; oi < numOutcomes; oi++) {
if (predCount[pi][oi] > 0) {
activeOutcomes[numActiveOutcomes] = oi;
numActiveOutcomes++;
}
}
if (numActiveOutcomes == numOutcomes) {
outcomePattern = allOutcomesPattern;
} else {
outcomePattern = new int[numActiveOutcomes];
System.arraycopy(activeOutcomes, 0, outcomePattern, 0, numActiveOutcomes);
}
}
params[pi] = new MutableContext(outcomePattern, new double[numActiveOutcomes]);
for (int i = 0; i < modelExpects.length; i++) {
modelExpects[i][pi] = new MutableContext(outcomePattern, new double[numActiveOutcomes]);
}
observedExpects[pi] = new MutableContext(outcomePattern, new double[numActiveOutcomes]);
for (int aoi = 0; aoi < numActiveOutcomes; aoi++) {
int oi = outcomePattern[aoi];
params[pi].setParameter(aoi, 0.0);
for (MutableContext[] modelExpect : modelExpects) {
modelExpect[pi].setParameter(aoi, 0.0);
}
if (predCount[pi][oi] > 0) {
observedExpects[pi].setParameter(aoi, predCount[pi][oi]);
} else if (useSimpleSmoothing) {
observedExpects[pi].setParameter(aoi, smoothingObservation);
}
}
}
display("...done.\n");
/* Find the parameters *****/
if (threads == 1) {
display("Computing model parameters ...\n");
} else {
display("Computing model parameters in " + threads + " threads...\n");
}
findParameters(iterations, correctionConstant);
// Create and return the model
return new GISModel(params, predLabels, outcomeLabels);
}
/* Estimate and return the model parameters. */
private void findParameters(int iterations, double correctionConstant) {
int threads = modelExpects.length;
ExecutorService executor = Executors.newFixedThreadPool(threads);
CompletionService<ModelExpectationComputeTask> completionService =
new ExecutorCompletionService<>(executor);
double prevLL = 0.0;
double currLL;
display("Performing " + iterations + " iterations.\n");
for (int i = 1; i <= iterations; i++) {
if (i < 10) {
display(" " + i + ": ");
} else if (i < 100) {
display(" " + i + ": ");
} else {
display(i + ": ");
}
currLL = nextIteration(correctionConstant, completionService);
if (i > 1) {
if (prevLL > currLL) {
System.err.println("Model Diverging: loglikelihood decreased");
break;
}
if (currLL - prevLL < llThreshold) {
break;
}
}
prevLL = currLL;
}
// kill a bunch of these big objects now that we don't need them
observedExpects = null;
modelExpects = null;
numTimesEventsSeen = null;
contexts = null;
executor.shutdown();
}
  // Modeled on the implementation in Zhang Le's maxent kit.
  /**
   * Computes the parameter update for one (predicate, outcome) pair under a
   * Gaussian (L2) prior, using up to 50 Newton-Raphson steps to solve the
   * smoothed GIS update equation.
   *
   * @param predicate index of the predicate whose parameter is updated
   * @param oid index into the predicate's active-outcome parameter array
   * @param correctionConstant the GIS correction constant
   * @return the additive update to apply to the parameter
   */
  private double gaussianUpdate(int predicate, int oid, double correctionConstant) {
    double param = params[predicate].getParameters()[oid];
    double x0 = 0.0;
    double modelValue = modelExpects[0][predicate].getParameters()[oid];
    double observedValue = observedExpects[predicate].getParameters()[oid];
    // Newton-Raphson: find the root of
    //   f(x) = modelValue * exp(C * x) + (param + x) / sigma - observedValue
    // where C is the correction constant and sigma the Gaussian variance.
    for (int i = 0; i < 50; i++) {
      double tmp = modelValue * Math.exp(correctionConstant * x0);
      double f = tmp + (param + x0) / sigma - observedValue;
      double fp = tmp * correctionConstant + 1 / sigma;
      if (fp == 0) {
        // Flat derivative: the iteration cannot make progress.
        break;
      }
      double x = x0 - f / fp;
      if (Math.abs(x - x0) < 0.000001) {
        // Converged to the requested tolerance.
        x0 = x;
        break;
      }
      x0 = x;
    }
    return x0;
  }
  /*
   * Computes one iteration of GIS and returns the log-likelihood of the
   * current model on the training data.
   *
   * The model expectations are computed in parallel: the events are split
   * into roughly equal contiguous slices, one per worker thread, each
   * evaluated by a ModelExpectationComputeTask that accumulates into its own
   * modelExpects[threadIndex] buffer. The per-thread expectations are then
   * merged into modelExpects[0] and used to update the parameters.
   */
  private double nextIteration(double correctionConstant,
      CompletionService<ModelExpectationComputeTask> completionService) {
    // compute contribution of p(a|b_i) for each feature and the new
    // correction parameter
    double loglikelihood = 0.0;
    int numEvents = 0;
    int numCorrect = 0;
    // Each thread gets equal number of tasks, if the number of tasks
    // is not divisible by the number of threads, the first "leftOver"
    // threads have one extra task.
    int numberOfThreads = modelExpects.length;
    int taskSize = numUniqueEvents / numberOfThreads;
    int leftOver = numUniqueEvents % numberOfThreads;
    // submit all tasks to the completion service.
    for (int i = 0; i < numberOfThreads; i++) {
      if (i < leftOver) {
        // The first "leftOver" tasks handle taskSize + 1 events; the start
        // index is shifted by i to skip the extra events already assigned
        // to earlier tasks.
        completionService.submit(new ModelExpectationComputeTask(i, i * taskSize + i,
            taskSize + 1));
      } else {
        completionService.submit(new ModelExpectationComputeTask(i,
            i * taskSize + leftOver, taskSize));
      }
    }
    // Collect all finished tasks (in completion order) and sum their stats.
    for (int i = 0; i < numberOfThreads; i++) {
      ModelExpectationComputeTask finishedTask;
      try {
        finishedTask = completionService.take().get();
      } catch (InterruptedException e) {
        // TODO: We got interrupted, but that is currently not really supported!
        // For now we just print the exception and fail hard. We hopefully soon
        // handle this case properly!
        e.printStackTrace();
        throw new IllegalStateException("Interruption is not supported!", e);
      } catch (ExecutionException e) {
        // Only runtime exception can be thrown during training, if one was thrown
        // it should be re-thrown. That could for example be a NullPointerException
        // which is caused through a bug in our implementation.
        throw new RuntimeException("Exception during training: " + e.getMessage(), e);
      }
      // When they are done, retrieve the results ...
      numEvents += finishedTask.getNumEvents();
      numCorrect += finishedTask.getNumCorrect();
      loglikelihood += finishedTask.getLoglikelihood();
    }
    display(".");
    // Merge the per-thread expectations: accumulate every worker's buffer
    // into thread 0's slot, which is used for the parameter update below.
    for (int pi = 0; pi < numPreds; pi++) {
      int[] activeOutcomes = params[pi].getOutcomes();
      for (int aoi = 0; aoi < activeOutcomes.length; aoi++) {
        for (int i = 1; i < modelExpects.length; i++) {
          modelExpects[0][pi].updateParameter(aoi, modelExpects[i][pi].getParameters()[aoi]);
        }
      }
    }
    display(".");
    // compute the new parameter values
    for (int pi = 0; pi < numPreds; pi++) {
      double[] observed = observedExpects[pi].getParameters();
      double[] model = modelExpects[0][pi].getParameters();
      int[] activeOutcomes = params[pi].getOutcomes();
      for (int aoi = 0; aoi < activeOutcomes.length; aoi++) {
        if (useGaussianSmoothing) {
          params[pi].updateParameter(aoi, gaussianUpdate(pi, aoi, correctionConstant));
        } else {
          if (model[aoi] == 0) {
            System.err.println("Model expects == 0 for " + predLabels[pi] + " " + outcomeLabels[aoi]);
          }
          // Standard GIS update: delta = log(observed / model) / C.
          //params[pi].updateParameter(aoi,(Math.log(observed[aoi]) - Math.log(model[aoi])));
          params[pi].updateParameter(aoi, ((Math.log(observed[aoi]) - Math.log(model[aoi]))
              / correctionConstant));
        }
        for (MutableContext[] modelExpect : modelExpects) {
          modelExpect[pi].setParameter(aoi, 0.0); // re-initialize to 0.0's
        }
      }
    }
    display(". loglikelihood=" + loglikelihood + "\t" + ((double) numCorrect / numEvents) + "\n");
    return loglikelihood;
  }
protected void display(String s) {
if (printMessages) {
System.out.print(s);
}
}
  /**
   * Worker task that computes model expectations, the log-likelihood
   * contribution, and accuracy counts for one contiguous slice of the
   * training events. Each task writes only into its own
   * {@code modelExpects[threadIndex]} buffer, so no locking is needed while
   * running; the caller merges all buffers after the tasks complete.
   */
  private class ModelExpectationComputeTask implements Callable<ModelExpectationComputeTask> {
    // First event index (inclusive) of the slice this task processes.
    private final int startIndex;
    // Number of events in the slice.
    private final int length;
    // Index of this task's private expectation buffer in modelExpects.
    final private int threadIndex;
    private double loglikelihood = 0;
    private int numEvents = 0;
    private int numCorrect = 0;
    // startIndex to compute, number of events to compute
    ModelExpectationComputeTask(int threadIndex, int startIndex, int length) {
      this.startIndex = startIndex;
      this.length = length;
      this.threadIndex = threadIndex;
    }
    public ModelExpectationComputeTask call() {
      final double[] modelDistribution = new double[numOutcomes];
      for (int ei = startIndex; ei < startIndex + length; ei++) {
        // TODO: check interruption status here, if interrupted set a poisoned flag and return
        // Evaluate the current model on this event to fill modelDistribution
        // with p(outcome | context).
        if (values != null) {
          prior.logPrior(modelDistribution, contexts[ei], values[ei]);
          GISModel.eval(contexts[ei], values[ei], modelDistribution, evalParams);
        } else {
          prior.logPrior(modelDistribution, contexts[ei]);
          GISModel.eval(contexts[ei], modelDistribution, evalParams);
        }
        // Accumulate the model expectation of every feature active in this
        // event, weighted by the event's frequency (and feature value, if any).
        for (int j = 0; j < contexts[ei].length; j++) {
          int pi = contexts[ei][j];
          int[] activeOutcomes = modelExpects[threadIndex][pi].getOutcomes();
          for (int aoi = 0; aoi < activeOutcomes.length; aoi++) {
            int oi = activeOutcomes[aoi];
            // numTimesEventsSeen must also be thread safe
            if (values != null && values[ei] != null) {
              modelExpects[threadIndex][pi].updateParameter(aoi, modelDistribution[oi]
                  * values[ei][j] * numTimesEventsSeen[ei]);
            } else {
              modelExpects[threadIndex][pi].updateParameter(aoi, modelDistribution[oi]
                  * numTimesEventsSeen[ei]);
            }
          }
        }
        loglikelihood += Math.log(modelDistribution[outcomeList[ei]]) * numTimesEventsSeen[ei];
        numEvents += numTimesEventsSeen[ei];
        // Accuracy (argmax outcome == observed outcome) is tracked only when
        // progress messages are enabled, since it is purely informational.
        if (printMessages) {
          int max = 0;
          for (int oi = 1; oi < numOutcomes; oi++) {
            if (modelDistribution[oi] > modelDistribution[max]) {
              max = oi;
            }
          }
          if (max == outcomeList[ei]) {
            numCorrect += numTimesEventsSeen[ei];
          }
        }
      }
      return this;
    }
    // Accessors are synchronized so values written by the worker thread are
    // safely visible to the thread that merges the results.
    synchronized int getNumEvents() {
      return numEvents;
    }
    synchronized int getNumCorrect() {
      return numCorrect;
    }
    synchronized double getLoglikelihood() {
      return loglikelihood;
    }
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent/RealBasicEventStream.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.maxent;
import java.io.IOException;
import opennlp.tools.ml.model.Event;
import opennlp.tools.ml.model.RealValueFileEventStream;
import opennlp.tools.util.ObjectStream;
/**
 * An {@link ObjectStream} of {@link Event}s parsed from lines of the form
 * {@code ctx1 ctx2 ... ctxN outcome}, where contexts may carry real values
 * (parsed via {@link RealValueFileEventStream#parseContexts}).
 */
public class RealBasicEventStream implements ObjectStream<Event> {
  // NOTE(review): cg appears unused within this class — verify before removal.
  ContextGenerator<String> cg = new BasicContextGenerator();
  private ObjectStream<String> ds;

  public RealBasicEventStream(ObjectStream<String> ds) {
    this.ds = ds;
  }

  /**
   * Reads the next event from the underlying line stream.
   *
   * @return the parsed {@link Event}, or {@code null} when the stream is
   *         exhausted (or the line contains no space separator)
   */
  public Event read() throws IOException {
    String line = ds.read();
    return line == null ? null : createEvent(line);
  }

  /** Parses one whitespace-separated line into an Event; the last token is the outcome. */
  private Event createEvent(String obs) {
    int sep = obs.lastIndexOf(' ');
    if (sep == -1) {
      return null;
    }
    String[] contexts = obs.substring(0, sep).split("\\s+");
    float[] values = RealValueFileEventStream.parseContexts(contexts);
    String outcome = obs.substring(sep + 1);
    return new Event(outcome, contexts, values);
  }

  @Override
  public void reset() throws IOException, UnsupportedOperationException {
    ds.reset();
  }

  @Override
  public void close() throws IOException {
    ds.close();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent/io/BinaryGISModelReader.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.maxent.io;
import java.io.DataInputStream;
import opennlp.tools.ml.model.BinaryFileDataReader;
/**
 * A reader for GIS models stored in binary format. All parsing logic is
 * inherited from {@link GISModelReader}; this subclass only wires the binary
 * stream into a {@code BinaryFileDataReader}.
 */
public class BinaryGISModelReader extends GISModelReader {
  /**
   * Constructor which directly instantiates the DataInputStream containing the
   * model contents.
   *
   * @param dis
   *          The DataInputStream containing the model information.
   */
  public BinaryGISModelReader(DataInputStream dis) {
    super(new BinaryFileDataReader(dis));
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent/io/BinaryGISModelWriter.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.maxent.io;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.zip.GZIPOutputStream;
import ai.idylnlp.opennlp.custom.EncryptedDataOutputStream;
import opennlp.tools.ml.model.AbstractModel;
/**
 * Model writer that saves GIS models in (encrypted) binary format.
 */
public class BinaryGISModelWriter extends GISModelWriter {

  // Destination stream; strings are written with encryption.
  private final EncryptedDataOutputStream output;

  /**
   * Constructor which takes a GISModel and a File and prepares itself to write
   * the model to that file. Detects whether the file is gzipped or not based on
   * whether the suffix contains ".gz".
   *
   * @param model
   *          The GISModel which is to be persisted.
   * @param f
   *          The File in which the model is to be persisted.
   * @throws IOException if the output file cannot be opened
   */
  public BinaryGISModelWriter(AbstractModel model, File f) throws IOException {
    super(model);
    if (f.getName().endsWith(".gz")) {
      output = new EncryptedDataOutputStream(new GZIPOutputStream(
          new FileOutputStream(f)));
    } else {
      output = new EncryptedDataOutputStream(new FileOutputStream(f));
    }
  }

  /**
   * Constructor which takes a GISModel and a DataOutputStream and prepares
   * itself to write the model to that stream.
   *
   * @param model
   *          The GISModel which is to be persisted.
   * @param dos
   *          The stream which will be used to persist the model.
   */
  public BinaryGISModelWriter(AbstractModel model, EncryptedDataOutputStream dos) {
    super(model);
    output = dos;
  }

  // Strings are the only values written through the encrypting code path.
  public void writeUTF(String s) throws IOException {
    output.writeEncryptedUTF(s);
  }

  public void writeInt(int i) throws IOException {
    output.writeInt(i);
  }

  public void writeDouble(double d) throws IOException {
    output.writeDouble(d);
  }

  /** Flushes and closes the underlying stream. */
  public void close() throws IOException {
    output.flush();
    output.close();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent/io/BinaryQNModelReader.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.maxent.io;
import java.io.DataInputStream;
import opennlp.tools.ml.model.BinaryFileDataReader;
/**
 * A reader for quasi-newton models stored in binary format. All parsing logic
 * is inherited from {@link QNModelReader}; this subclass only wires the binary
 * stream into a {@code BinaryFileDataReader}.
 */
public class BinaryQNModelReader extends QNModelReader {
  /**
   * Constructor which directly instantiates the DataInputStream containing the
   * model contents.
   *
   * @param dis
   *          The DataInputStream containing the model information.
   */
  public BinaryQNModelReader(DataInputStream dis) {
    super(new BinaryFileDataReader(dis));
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent/io/BinaryQNModelWriter.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.maxent.io;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.zip.GZIPOutputStream;
import ai.idylnlp.opennlp.custom.EncryptedDataOutputStream;
import opennlp.tools.ml.model.AbstractModel;
/**
 * Model writer that saves quasi-newton (QN) models in (encrypted) binary
 * format.
 */
public class BinaryQNModelWriter extends QNModelWriter {
  protected EncryptedDataOutputStream output;

  /**
   * Constructor which takes a GISModel and a File and prepares itself to write
   * the model to that file. Detects whether the file is gzipped or not based on
   * whether the suffix contains ".gz".
   *
   * @param model
   *          The GISModel which is to be persisted.
   * @param f
   *          The File in which the model is to be persisted.
   */
  public BinaryQNModelWriter(AbstractModel model, File f) throws IOException {
    super(model);
    boolean gzipped = f.getName().endsWith(".gz");
    if (gzipped) {
      output = new EncryptedDataOutputStream(new GZIPOutputStream(new FileOutputStream(f)));
    } else {
      output = new EncryptedDataOutputStream(new FileOutputStream(f));
    }
  }

  /**
   * Constructor which takes a GISModel and a DataOutputStream and prepares
   * itself to write the model to that stream.
   *
   * @param model
   *          The GISModel which is to be persisted.
   * @param dos
   *          The stream which will be used to persist the model.
   */
  public BinaryQNModelWriter(AbstractModel model, EncryptedDataOutputStream dos) {
    super(model);
    output = dos;
  }

  /** Strings go through the encrypting write path. */
  public void writeUTF(String s) throws IOException {
    output.writeEncryptedUTF(s);
  }

  public void writeInt(int i) throws IOException {
    output.writeInt(i);
  }

  public void writeDouble(double d) throws IOException {
    output.writeDouble(d);
  }

  /** Flushes and closes the underlying stream. */
  public void close() throws IOException {
    output.flush();
    output.close();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent/io/GISModelReader.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.maxent.io;
import java.io.File;
import java.io.IOException;
import opennlp.tools.ml.maxent.GISModel;
import opennlp.tools.ml.model.AbstractModel;
import opennlp.tools.ml.model.AbstractModelReader;
import opennlp.tools.ml.model.Context;
import opennlp.tools.ml.model.DataReader;
/**
 * Abstract parent class for readers of GISModels.
 */
public class GISModelReader extends AbstractModelReader {
  public GISModelReader(File file) throws IOException {
    super(file);
  }

  public GISModelReader(DataReader dataReader) {
    super(dataReader);
  }

  /**
   * Retrieve a model from disk. It assumes that models are saved in the
   * following sequence:
   *
   * <br>
   * GIS (model type identifier) <br>
   * 1. # of parameters (int) <br>
   * 2. the correction constant (int) <br>
   * 3. the correction constant parameter (double) <br>
   * 4. # of outcomes (int) <br>
   * * list of outcome names (String) <br>
   * 5. # of different types of outcome patterns (int) <br>
   * * list of (int int[]) <br>
   * [# of predicates for which outcome pattern is true] [outcome pattern] <br>
   * 6. # of predicates (int) <br>
   * * list of predicate names (String)
   *
   * <p>
   * If you are creating a reader for a format which won't work with this
   * (perhaps a database or xml file), override this method and ignore the other
   * methods provided in this abstract class.
   *
   * @return The GISModel stored in the format and location specified to this
   *         GISModelReader (usually via its the constructor).
   */
  public AbstractModel constructModel() throws IOException {
    // read correction constant (not used anymore)
    readInt();
    // read correction params (not used anymore)
    readDouble();
    String[] outcomeLabels = getOutcomes();
    int[][] outcomePatterns = getOutcomePatterns();
    String[] predLabels = getPredicates();
    Context[] params = getParameters(outcomePatterns);
    return new GISModel(params, predLabels, outcomeLabels);
  }

  /**
   * Verifies that the stream contains a GIS model. A mismatch is reported as
   * a warning (historically this never aborted loading).
   */
  public void checkModelType() throws java.io.IOException {
    String modelType = readUTF();
    if (!modelType.equals("GIS"))
      // FIX: diagnostics belong on stderr, not stdout.
      System.err.println("Error: attempting to load a " + modelType
          + " model as a GIS model." + " You should expect problems.");
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent/io/GISModelWriter.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.maxent.io;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import opennlp.tools.ml.model.AbstractModel;
import opennlp.tools.ml.model.AbstractModelWriter;
import opennlp.tools.ml.model.ComparablePredicate;
import opennlp.tools.ml.model.Context;
/**
 * Abstract parent class for GISModel writers. It provides the persist method
 * which takes care of the structure of a stored document, and requires an
 * extending class to define precisely how the data should be stored.
 */
public abstract class GISModelWriter extends AbstractModelWriter {
  protected Context[] PARAMS;
  protected String[] OUTCOME_LABELS;
  protected String[] PRED_LABELS;

  public GISModelWriter(AbstractModel model) {
    Object[] data = model.getDataStructures();
    @SuppressWarnings("unchecked")
    Map<String, Context> pmap = (Map<String, Context>) data[1];
    OUTCOME_LABELS = (String[]) data[2];
    PARAMS = new Context[pmap.size()];
    PRED_LABELS = new String[pmap.size()];
    int i = 0;
    for (Map.Entry<String, Context> pred : pmap.entrySet()) {
      PRED_LABELS[i] = pred.getKey();
      PARAMS[i] = pred.getValue();
      i++;
    }
  }

  /**
   * Writes the model to disk, using the <code>writeX()</code> methods provided
   * by extending classes.
   *
   * <p>
   * If you wish to create a GISModelWriter which uses a different structure, it
   * will be necessary to override the persist method in addition to
   * implementing the <code>writeX()</code> methods.
   */
  public void persist() throws IOException {
    // the type of model (GIS)
    writeUTF("GIS");
    // the value of the correction constant (not used anymore)
    writeInt(1);
    // the value of the correction params (not used anymore)
    writeDouble(1);
    // the mapping from outcomes to their integer indexes
    writeInt(OUTCOME_LABELS.length);
    for (String OUTCOME_LABEL : OUTCOME_LABELS) {
      writeUTF(OUTCOME_LABEL);
    }
    // the mapping from predicates to the outcomes they contributed to.
    // The sorting is done so that we actually can write this out more
    // compactly than as the entire list.
    ComparablePredicate[] sorted = sortValues();
    List<List<ComparablePredicate>> compressed = compressOutcomes(sorted);
    writeInt(compressed.size());
    for (List<ComparablePredicate> aCompressed : compressed) {
      // <group size><outcome pattern> — the pattern comes from the group's
      // first predicate; all members share it. (Removed a redundant
      // List<?> cast here.)
      writeUTF(aCompressed.size() + aCompressed.get(0).toString());
    }
    // the mapping from predicate names to their integer indexes
    writeInt(PARAMS.length);
    for (ComparablePredicate aSorted : sorted) {
      writeUTF(aSorted.name);
    }
    // write out the parameters
    for (ComparablePredicate aSorted : sorted) {
      for (int j = 0; j < aSorted.params.length; j++) {
        writeDouble(aSorted.params[j]);
      }
    }
    close();
  }

  /**
   * Wraps each predicate with its outcome pattern and parameters and sorts
   * them so that predicates with identical patterns are adjacent.
   */
  protected ComparablePredicate[] sortValues() {
    ComparablePredicate[] sortPreds = new ComparablePredicate[PARAMS.length];
    for (int pid = 0; pid < PARAMS.length; pid++) {
      int[] predkeys = PARAMS[pid].getOutcomes();
      double[] activeParams = PARAMS[pid].getParameters();
      sortPreds[pid] = new ComparablePredicate(PRED_LABELS[pid],
          predkeys, activeParams);
    }
    Arrays.sort(sortPreds);
    return sortPreds;
  }

  /**
   * Groups adjacent predicates (in sorted order) that share the same outcome
   * pattern, so the pattern needs to be written only once per group.
   */
  protected List<List<ComparablePredicate>> compressOutcomes(ComparablePredicate[] sorted) {
    List<List<ComparablePredicate>> outcomePatterns = new ArrayList<>();
    if (sorted.length > 0) {
      ComparablePredicate cp = sorted[0];
      List<ComparablePredicate> newGroup = new ArrayList<>();
      for (int i = 0; i < sorted.length; i++) {
        if (cp.compareTo(sorted[i]) == 0) {
          newGroup.add(sorted[i]);
        } else {
          cp = sorted[i];
          outcomePatterns.add(newGroup);
          newGroup = new ArrayList<>();
          newGroup.add(sorted[i]);
        }
      }
      outcomePatterns.add(newGroup);
    }
    return outcomePatterns;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent/io/QNModelReader.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.maxent.io;
import java.io.File;
import java.io.IOException;
import opennlp.tools.ml.maxent.quasinewton.QNModel;
import opennlp.tools.ml.model.Context;
import opennlp.tools.ml.model.DataReader;
/**
 * Reader for quasi-newton (QN) maxent models. Reuses the GIS reading logic
 * but skips the correction-constant fields, which the QN format omits.
 */
public class QNModelReader extends GISModelReader {
  public QNModelReader(DataReader dataReader) {
    super(dataReader);
  }

  public QNModelReader(File file) throws IOException {
    super(file);
  }

  @Override
  public void checkModelType() throws IOException {
    String modelType = readUTF();
    if (!modelType.equals("QN"))
      // FIX: diagnostics belong on stderr, not stdout.
      System.err.println("Error: attempting to load a " + modelType
          + " model as a MAXENT_QN model." + " You should expect problems.");
  }

  // FIX: annotate the override (covariant return of GISModelReader.constructModel).
  @Override
  public QNModel constructModel() throws IOException {
    String[] outcomeLabels = getOutcomes();
    int[][] outcomePatterns = getOutcomePatterns();
    String[] predLabels = getPredicates();
    Context[] params = getParameters(outcomePatterns);
    return new QNModel(params, predLabels, outcomeLabels);
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent/io/QNModelWriter.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.maxent.io;
import java.io.IOException;
import java.util.List;
import opennlp.tools.ml.model.AbstractModel;
import opennlp.tools.ml.model.ComparablePredicate;
/**
 * Abstract writer for quasi-newton (QN) maxent models. Shares the sorting and
 * compression machinery of {@link GISModelWriter} but writes the QN header
 * (no correction-constant fields).
 */
public abstract class QNModelWriter extends GISModelWriter {
  public QNModelWriter(AbstractModel model) {
    super(model);
  }

  /**
   * Persists the model as: type marker, outcome labels, compressed outcome
   * patterns, predicate names, and parameter values.
   */
  @Override
  public void persist() throws IOException {
    // the type of model (QN)
    writeUTF("QN");
    // the mapping from outcomes to their integer indexes
    writeInt(OUTCOME_LABELS.length);
    for (String label : OUTCOME_LABELS) {
      writeUTF(label);
    }
    // the mapping from predicates to the outcomes they contributed to.
    // The sorting is done so that we actually can write this out more
    // compactly than as the entire list.
    ComparablePredicate[] sorted = sortValues();
    List<List<ComparablePredicate>> compressed = compressOutcomes(sorted);
    writeInt(compressed.size());
    for (List<ComparablePredicate> group : compressed) {
      writeUTF(group.size() + group.get(0).toString());
    }
    // the mapping from predicate names to their integer indexes
    writeInt(PARAMS.length);
    for (ComparablePredicate pred : sorted) {
      writeUTF(pred.name);
    }
    // write out the parameters
    for (ComparablePredicate pred : sorted) {
      for (int j = 0; j < pred.params.length; j++) {
        writeDouble(pred.params[j]);
      }
    }
    close();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent/quasinewton/ArrayMath.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.maxent.quasinewton;
import java.util.List;
/**
 * Utility class for simple vector arithmetic.
 */
public class ArrayMath {

  /**
   * Computes the dot product of two vectors.
   *
   * @return the inner product, or {@code Double.NaN} when either vector is
   *         {@code null} or their lengths differ
   */
  public static double innerProduct(double[] vecA, double[] vecB) {
    if (vecA == null || vecB == null || vecA.length != vecB.length)
      return Double.NaN;
    double result = 0.0;
    for (int idx = 0; idx < vecA.length; idx++) {
      result += vecA[idx] * vecB[idx];
    }
    return result;
  }

  /**
   * L1-norm: sum of the absolute values of the elements.
   */
  public static double l1norm(double[] v) {
    double total = 0;
    for (double component : v) {
      total += Math.abs(component);
    }
    return total;
  }

  /**
   * L2-norm (Euclidean length).
   */
  public static double l2norm(double[] v) {
    return Math.sqrt(innerProduct(v, v));
  }

  /**
   * Inverse L2-norm.
   */
  public static double invL2norm(double[] v) {
    return 1 / l2norm(v);
  }

  /**
   * Computes \log(\sum_{i=1}^n e^{x_i}) using a maximum-element trick
   * to avoid arithmetic overflow.
   *
   * @param x input vector
   * @return log-sum of exponentials of vector elements
   */
  public static double logSumOfExps(double[] x) {
    double max = max(x);
    double expSum = 0.0;
    for (double xi : x) {
      // exp(-inf - max) would be 0 anyway; skipping avoids NaN when max is -inf.
      if (xi != Double.NEGATIVE_INFINITY)
        expSum += Math.exp(xi - max);
    }
    return max + Math.log(expSum);
  }

  /**
   * @return the largest element of x
   */
  public static double max(double[] x) {
    return x[maxIdx(x)];
  }

  /**
   * Finds the index of the maximum element in the vector x.
   *
   * @param x input vector
   * @return index of the maximum element; the first such index is returned
   *         when the maximum occurs more than once
   * @throws IllegalArgumentException when x is null or empty
   */
  public static int maxIdx(double[] x) {
    if (x == null || x.length == 0) {
      throw new IllegalArgumentException("Vector x is null or empty");
    }
    int best = 0;
    for (int i = 1; i < x.length; i++) {
      if (x[best] < x[i])
        best = i;
    }
    return best;
  }

  // === Not really related to math ===

  /**
   * Converts a list of Double objects into an array of primitive doubles.
   */
  public static double[] toDoubleArray(List<Double> list) {
    double[] result = new double[list.size()];
    for (int i = 0; i < result.length; i++) {
      result[i] = list.get(i);
    }
    return result;
  }

  /**
   * Converts a list of Integer objects into an array of primitive integers.
   */
  public static int[] toIntArray(List<Integer> list) {
    int[] result = new int[list.size()];
    for (int i = 0; i < result.length; i++) {
      result[i] = list.get(i);
    }
    return result;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent/quasinewton/Function.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.maxent.quasinewton;
/**
 * Interface for a real-valued, differentiable function of a vector argument,
 * as consumed by the quasi-Newton minimizer in this package.
 */
public interface Function {
  /** @return the number of variables in the function's domain, i.e. the expected length of {@code x}. */
  int getDimension();
  /** @return the function value at point {@code x}. */
  double valueAt(double[] x);
  /** @return the gradient at point {@code x}; implementations in this package return an array of {@link #getDimension()} elements. */
  double[] gradientAt(double[] x);
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent/quasinewton/LineSearch.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.maxent.quasinewton;
/**
 * Class that performs line search to find minimum along a given search
 * direction. Results are communicated through the mutable
 * {@link LineSearchResult}, whose arrays are deliberately reused across
 * iterations to avoid reallocation.
 */
public class LineSearch {
  // Sufficient-decrease constant of the Armijo condition.
  private static final double C = 0.0001;
  private static final double RHO = 0.5; // decrease of step size (must be from 0 to 1)

  /**
   * Backtracking line search (see Nocedal & Wright 2006, Numerical Optimization, p. 37).
   * Starting from {@code initialStepSize}, repeatedly shrinks the step by RHO
   * until the Armijo sufficient-decrease condition holds, then writes the
   * accepted point, value and gradient back into {@code lsr}.
   */
  public static void doLineSearch(Function function,
      double[] direction, LineSearchResult lsr, double initialStepSize)
  {
    double stepSize = initialStepSize;
    int currFctEvalCount = lsr.getFctEvalCount();
    // The "next" fields of lsr hold the outcome of the previous iteration,
    // i.e. the point this search starts from.
    double[] x = lsr.getNextPoint();
    double[] gradAtX = lsr.getGradAtNext();
    double valueAtX = lsr.getValueAtNext();
    int dimension = x.length;
    // Retrieve current points and gradient for array reuse purpose
    double[] nextPoint = lsr.getCurrPoint();
    double[] gradAtNextPoint = lsr.getGradAtCurr();
    double valueAtNextPoint;
    // Directional derivative of f at x along the search direction.
    double dirGradientAtX = ArrayMath.innerProduct(direction, gradAtX);
    // To avoid recomputing in the loop
    double cachedProd = C * dirGradientAtX;
    // NOTE(review): there is no minimum-step cutoff here; if function.valueAt
    // ever returned NaN the Armijo test would stay false and this loop would
    // not terminate -- confirm callers guarantee finite values.
    while (true) {
      // Get next point: x + stepSize * direction
      for (int i = 0; i < dimension; i++) {
        nextPoint[i] = x[i] + direction[i] * stepSize;
      }
      // New value
      valueAtNextPoint = function.valueAt(nextPoint);
      currFctEvalCount++;
      // Check Armijo condition: f(x + a*d) <= f(x) + C * a * (d . grad f(x))
      if (valueAtNextPoint <= valueAtX + cachedProd * stepSize)
        break;
      // Shrink step size
      stepSize *= RHO;
    }
    // Compute and save gradient at the new point
    System.arraycopy(function.gradientAt(nextPoint), 0, gradAtNextPoint, 0,
        gradAtNextPoint.length);
    // Update line search result: the old "next" slot becomes "curr" and the
    // accepted point becomes the new "next".
    lsr.setAll(stepSize, valueAtX, valueAtNextPoint,
        gradAtX, gradAtNextPoint, x, nextPoint, currFctEvalCount);
  }

  /**
   * Constrained line search (see section 3.2 in the paper "Scalable Training
   * of L1-Regularized Log-Linear Models", Andrew et al. 2007).
   * Like {@link #doLineSearch} but each trial point is projected onto the
   * orthant defined by the sign vector (coordinates may not cross zero), and
   * the sufficient-decrease test uses the pseudo-gradient of the
   * L1-regularized objective.
   */
  public static void doConstrainedLineSearch(Function function,
      double[] direction, LineSearchResult lsr, double l1Cost, double initialStepSize)
  {
    double stepSize = initialStepSize;
    int currFctEvalCount = lsr.getFctEvalCount();
    double[] x = lsr.getNextPoint();
    double[] signX = lsr.getSignVector(); // existing sign vector
    double[] gradAtX = lsr.getGradAtNext();
    double[] pseudoGradAtX = lsr.getPseudoGradAtNext();
    double valueAtX = lsr.getValueAtNext();
    int dimension = x.length;
    // Retrieve current points and gradient for array reuse purpose
    double[] nextPoint = lsr.getCurrPoint();
    double[] gradAtNextPoint = lsr.getGradAtCurr();
    double valueAtNextPoint;
    double dirGradientAtX;
    // New sign vector: where x[i] is zero, use the descent direction implied
    // by the pseudo-gradient; otherwise keep x[i] itself (only its sign is
    // ever used, in the projection below).
    for (int i = 0; i < dimension; i++) {
      signX[i] = x[i] == 0 ? -pseudoGradAtX[i] : x[i];
    }
    while (true) {
      // Get next point
      for (int i = 0; i < dimension; i++) {
        nextPoint[i] = x[i] + direction[i] * stepSize;
      }
      // Projection: zero any coordinate that crossed its orthant boundary
      for (int i = 0; i < dimension; i++) {
        if (nextPoint[i] * signX[i] <= 0)
          nextPoint[i] = 0;
      }
      // New value of the L1-regularized objective
      valueAtNextPoint = function.valueAt(nextPoint) +
          l1Cost * ArrayMath.l1norm(nextPoint);
      currFctEvalCount++;
      // Directional derivative estimate based on the actual (projected) step
      dirGradientAtX = 0;
      for (int i = 0; i < dimension; i++) {
        dirGradientAtX += (nextPoint[i] - x[i]) * pseudoGradAtX[i];
      }
      // Check the sufficient decrease condition
      if (valueAtNextPoint <= valueAtX + C * dirGradientAtX)
        break;
      // Shrink step size
      stepSize *= RHO;
    }
    // Compute and save gradient at the new point
    System.arraycopy(function.gradientAt(nextPoint), 0, gradAtNextPoint, 0,
        gradAtNextPoint.length);
    // Update line search result
    lsr.setAll(stepSize, valueAtX, valueAtNextPoint, gradAtX,
        gradAtNextPoint, pseudoGradAtX, x, nextPoint, signX, currFctEvalCount);
  }

  // ------------------------------------------------------------------------------------- //

  /**
   * Class to store lineSearch result. Intentionally mutable: the minimizer
   * reuses a single instance (and the arrays it holds) across iterations.
   * Not thread-safe.
   */
  public static class LineSearchResult {
    // Cumulative number of objective-function evaluations so far.
    private int fctEvalCount;
    // Step size accepted by the most recent search.
    private double stepSize;
    private double valueAtCurr;
    private double valueAtNext;
    private double[] gradAtCurr;
    private double[] gradAtNext;
    // Pseudo-gradient; only populated when L1-regularization is in use.
    private double[] pseudoGradAtNext;
    private double[] currPoint;
    private double[] nextPoint;
    // Orthant sign vector; only populated when L1-regularization is in use.
    private double[] signVector;

    /**
     * Constructor
     */
    public LineSearchResult(
        double stepSize,
        double valueAtCurr,
        double valueAtNext,
        double[] gradAtCurr,
        double[] gradAtNext,
        double[] currPoint,
        double[] nextPoint,
        int fctEvalCount)
    {
      setAll(stepSize, valueAtCurr, valueAtNext, gradAtCurr, gradAtNext,
          currPoint, nextPoint, fctEvalCount);
    }

    /**
     * Constructor with sign vector (for constrained / L1-regularized search)
     */
    public LineSearchResult(
        double stepSize,
        double valueAtCurr,
        double valueAtNext,
        double[] gradAtCurr,
        double[] gradAtNext,
        double[] pseudoGradAtNext,
        double[] currPoint,
        double[] nextPoint,
        double[] signVector,
        int fctEvalCount)
    {
      setAll(stepSize, valueAtCurr, valueAtNext, gradAtCurr, gradAtNext,
          pseudoGradAtNext, currPoint, nextPoint, signVector, fctEvalCount);
    }

    /**
     * Update line search elements (unconstrained variant; pseudo-gradient and
     * sign vector are cleared)
     */
    public void setAll(
        double stepSize,
        double valueAtCurr,
        double valueAtNext,
        double[] gradAtCurr,
        double[] gradAtNext,
        double[] currPoint,
        double[] nextPoint,
        int fctEvalCount)
    {
      setAll(stepSize, valueAtCurr, valueAtNext, gradAtCurr, gradAtNext,
          null, currPoint, nextPoint, null, fctEvalCount);
    }

    /**
     * Update line search elements. Note: array arguments are stored by
     * reference, not copied.
     */
    public void setAll(
        double stepSize,
        double valueAtCurr,
        double valueAtNext,
        double[] gradAtCurr,
        double[] gradAtNext,
        double[] pseudoGradAtNext,
        double[] currPoint,
        double[] nextPoint,
        double[] signVector,
        int fctEvalCount)
    {
      this.stepSize = stepSize;
      this.valueAtCurr = valueAtCurr;
      this.valueAtNext = valueAtNext;
      this.gradAtCurr = gradAtCurr;
      this.gradAtNext = gradAtNext;
      this.pseudoGradAtNext = pseudoGradAtNext;
      this.currPoint = currPoint;
      this.nextPoint = nextPoint;
      this.signVector = signVector;
      this.fctEvalCount = fctEvalCount;
    }

    /**
     * Relative decrease of the objective in the last step.
     * NOTE(review): divides by valueAtCurr, so the result is infinite/NaN
     * when the current value is zero -- confirm callers only use this for a
     * convergence test where that is acceptable.
     */
    public double getFuncChangeRate() {
      return (valueAtCurr - valueAtNext) / valueAtCurr;
    }
    public double getStepSize() {
      return stepSize;
    }
    public void setStepSize(double stepSize) {
      this.stepSize = stepSize;
    }
    public double getValueAtCurr() {
      return valueAtCurr;
    }
    public void setValueAtCurr(double valueAtCurr) {
      this.valueAtCurr = valueAtCurr;
    }
    public double getValueAtNext() {
      return valueAtNext;
    }
    public void setValueAtNext(double valueAtNext) {
      this.valueAtNext = valueAtNext;
    }
    public double[] getGradAtCurr() {
      return gradAtCurr;
    }
    public void setGradAtCurr(double[] gradAtCurr) {
      this.gradAtCurr = gradAtCurr;
    }
    public double[] getGradAtNext() {
      return gradAtNext;
    }
    public void setGradAtNext(double[] gradAtNext) {
      this.gradAtNext = gradAtNext;
    }
    public double[] getPseudoGradAtNext() {
      return pseudoGradAtNext;
    }
    public void setPseudoGradAtNext(double[] pseudoGradAtNext) {
      this.pseudoGradAtNext = pseudoGradAtNext;
    }
    public double[] getCurrPoint() {
      return currPoint;
    }
    public void setCurrPoint(double[] currPoint) {
      this.currPoint = currPoint;
    }
    public double[] getNextPoint() {
      return nextPoint;
    }
    public void setNextPoint(double[] nextPoint) {
      this.nextPoint = nextPoint;
    }
    public double[] getSignVector() {
      return signVector;
    }
    public void setSignVector(double[] signVector) {
      this.signVector = signVector;
    }
    public int getFctEvalCount() {
      return fctEvalCount;
    }
    public void setFctEvalCount(int fctEvalCount) {
      this.fctEvalCount = fctEvalCount;
    }

    /**
     * Initial linear search object (no L1-regularization).
     */
    public static LineSearchResult getInitialObject(
        double valueAtX,
        double[] gradAtX,
        double[] x)
    {
      return getInitialObject(valueAtX, gradAtX, null, x, null, 0);
    }

    /**
     * Initial linear search object for L1-regularization.
     */
    public static LineSearchResult getInitialObjectForL1(
        double valueAtX,
        double[] gradAtX,
        double[] pseudoGradAtX,
        double[] x)
    {
      return getInitialObject(valueAtX, gradAtX, pseudoGradAtX, x, new double[x.length], 0);
    }

    // The starting point is stored in the "next" slots so the first call to
    // doLineSearch / doConstrainedLineSearch picks it up as its origin.
    public static LineSearchResult getInitialObject(
        double valueAtX,
        double[] gradAtX,
        double[] pseudoGradAtX,
        double[] x,
        double[] signX,
        int fctEvalCount) {
      return new LineSearchResult(0.0, 0.0, valueAtX, new double[x.length], gradAtX,
          pseudoGradAtX, new double[x.length], x, signX, fctEvalCount);
    }
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent/quasinewton/NegLogLikelihood.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.maxent.quasinewton;
import java.util.Arrays;
import opennlp.tools.ml.model.DataIndexer;
import opennlp.tools.ml.model.OnePassRealValueDataIndexer;
/**
 * Evaluate negative log-likelihood and its gradient from DataIndexer.
 * Scratch buffers are reused across calls, so instances are not thread-safe;
 * see {@code ParallelNegLogLikelihood} for the multi-threaded variant.
 */
public class NegLogLikelihood implements Function {
  // Total number of parameters: numOutcomes * numFeatures.
  protected int dimension;
  protected int numOutcomes;
  protected int numFeatures;
  protected int numContexts;
  // Information from data index
  // Per-event feature values; null unless the indexer is a
  // OnePassRealValueDataIndexer, in which case absent values mean 1.0.
  protected final float[][] values;
  // contexts[ci] holds the active feature ids of event ci.
  protected final int[][] contexts;
  // outcomeList[ci] is the observed outcome id of event ci.
  protected final int[] outcomeList;
  // numTimesEventsSeen[ci] is the multiplicity of event ci in the data.
  protected final int[] numTimesEventsSeen;
  // For calculating negLogLikelihood and gradient (reused scratch space)
  protected double[] tempSums;
  protected double[] expectation;
  protected double[] gradient;

  /**
   * @param indexer source of events, outcomes and (for real-valued indexers)
   *                feature values
   */
  public NegLogLikelihood(DataIndexer indexer) {
    // Get data from indexer.
    if (indexer instanceof OnePassRealValueDataIndexer) {
      this.values = indexer.getValues();
    } else {
      // Binary-valued features: every active feature counts as 1.0.
      this.values = null;
    }
    this.contexts = indexer.getContexts();
    this.outcomeList = indexer.getOutcomeList();
    this.numTimesEventsSeen = indexer.getNumTimesEventsSeen();
    this.numOutcomes = indexer.getOutcomeLabels().length;
    this.numFeatures = indexer.getPredLabels().length;
    this.numContexts = this.contexts.length;
    this.dimension = numOutcomes * numFeatures;
    this.expectation = new double[numOutcomes];
    this.tempSums = new double[numOutcomes];
    this.gradient = new double[dimension];
  }

  public int getDimension() {
    return this.dimension;
  }

  /** @return the all-zero parameter vector used to start optimization. */
  public double[] getInitialPoint() {
    return new double[dimension];
  }

  /**
   * Negative log-likelihood of the data under parameters x:
   * -sum_ci count(ci) * (score(ci, outcome(ci)) - logSumExp_oi score(ci, oi))
   *
   * @param x parameter vector of length {@link #getDimension()}
   * @throws IllegalArgumentException if x has the wrong length
   */
  public double valueAt(double[] x) {
    if (x.length != dimension)
      throw new IllegalArgumentException(
          "x is invalid, its dimension is not equal to domain dimension.");
    int ci, oi, ai, vectorIndex, outcome;
    double predValue, logSumOfExps;
    double negLogLikelihood = 0;
    for (ci = 0; ci < numContexts; ci++) {
      // tempSums[oi] = linear score of outcome oi for event ci.
      for (oi = 0; oi < numOutcomes; oi++) {
        tempSums[oi] = 0;
        for (ai = 0; ai < contexts[ci].length; ai++) {
          vectorIndex = indexOf(oi, contexts[ci][ai]);
          predValue = values != null ? values[ci][ai] : 1.0;
          tempSums[oi] += predValue * x[vectorIndex];
        }
      }
      // log of the normalization constant, computed overflow-safely.
      logSumOfExps = ArrayMath.logSumOfExps(tempSums);
      outcome = outcomeList[ci];
      // Each event contributes its log-probability, weighted by multiplicity.
      negLogLikelihood -= (tempSums[outcome] - logSumOfExps) * numTimesEventsSeen[ci];
    }
    return negLogLikelihood;
  }

  /**
   * Compute gradient of the negative log-likelihood at x.
   * The returned array is the internal scratch buffer; it is overwritten by
   * the next call.
   *
   * @param x parameter vector of length {@link #getDimension()}
   * @throws IllegalArgumentException if x has the wrong length
   */
  public double[] gradientAt(double[] x) {
    if (x.length != dimension)
      throw new IllegalArgumentException(
          "x is invalid, its dimension is not equal to the function.");
    int ci, oi, ai, vectorIndex;
    double predValue, logSumOfExps;
    int empirical;
    // Reset gradient
    Arrays.fill(gradient, 0);
    for (ci = 0; ci < numContexts; ci++) {
      // Linear scores per outcome (same computation as in valueAt).
      for (oi = 0; oi < numOutcomes; oi++) {
        expectation[oi] = 0;
        for (ai = 0; ai < contexts[ci].length; ai++) {
          vectorIndex = indexOf(oi, contexts[ci][ai]);
          predValue = values != null ? values[ci][ai] : 1.0;
          expectation[oi] += predValue * x[vectorIndex];
        }
      }
      logSumOfExps = ArrayMath.logSumOfExps(expectation);
      // Convert scores to model probabilities p(oi | ci).
      for (oi = 0; oi < numOutcomes; oi++) {
        expectation[oi] = Math.exp(expectation[oi] - logSumOfExps);
      }
      // Gradient = (model expectation - empirical count) per feature.
      for (oi = 0; oi < numOutcomes; oi++) {
        empirical = outcomeList[ci] == oi ? 1 : 0;
        for (ai = 0; ai < contexts[ci].length; ai++) {
          vectorIndex = indexOf(oi, contexts[ci][ai]);
          predValue = values != null ? values[ci][ai] : 1.0;
          gradient[vectorIndex] +=
              predValue * (expectation[oi] - empirical) * numTimesEventsSeen[ci];
        }
      }
    }
    return gradient;
  }

  /** Flattened index of the (outcome, feature) pair in the parameter vector. */
  protected int indexOf(int outcomeId, int featureId) {
    return outcomeId * numFeatures + featureId;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent/quasinewton/ParallelNegLogLikelihood.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.maxent.quasinewton;
import java.lang.reflect.Constructor;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import opennlp.tools.ml.model.DataIndexer;
/**
 * Evaluate negative log-likelihood and its gradient in parallel.
 * The contexts are partitioned into contiguous ranges, one per thread; each
 * task writes only its own slot of the per-thread result arrays, and the
 * partial results are combined after all futures complete.
 */
public class ParallelNegLogLikelihood extends NegLogLikelihood {
  // Number of threads
  private int threads;
  // Partial value of negative log-likelihood to be computed by each thread
  private double[] negLogLikelihoodThread;
  // Partial gradient, one dimension-length array per thread
  private double[][] gradientThread;

  /**
   * @param indexer source of events, outcomes and feature values
   * @param threads number of worker threads; must be positive
   * @throws IllegalArgumentException if threads is not positive
   */
  public ParallelNegLogLikelihood(DataIndexer indexer, int threads) {
    super(indexer);
    if (threads <= 0)
      throw new IllegalArgumentException(
          "Number of threads must 1 or larger");
    this.threads = threads;
    this.negLogLikelihoodThread = new double[threads];
    this.gradientThread = new double[threads][dimension];
  }

  /**
   * Negative log-likelihood, summed over the per-thread partial values.
   */
  @Override
  public double valueAt(double[] x) {
    if (x.length != dimension)
      throw new IllegalArgumentException(
          "x is invalid, its dimension is not equal to domain dimension.");
    // Compute partial value of negative log-likelihood in each thread
    computeInParallel(x, NegLLComputeTask.class);
    double negLogLikelihood = 0;
    for (int t = 0; t < threads; t++) {
      negLogLikelihood += negLogLikelihoodThread[t];
    }
    return negLogLikelihood;
  }

  /**
   * Compute gradient by summing the per-thread partial gradients.
   * Returns the shared scratch buffer inherited from the superclass.
   */
  @Override
  public double[] gradientAt(double[] x) {
    if (x.length != dimension)
      throw new IllegalArgumentException(
          "x is invalid, its dimension is not equal to the function.");
    // Compute partial gradient in each thread
    computeInParallel(x, GradientComputeTask.class);
    // Accumulate gradient
    for (int i = 0; i < dimension; i++) {
      gradient[i] = 0;
      for (int t = 0; t < threads; t++) {
        gradient[i] += gradientThread[t][i];
      }
    }
    return gradient;
  }

  /**
   * Run one task per thread over an even partition of the contexts; the last
   * task also takes the remainder. Tasks are built reflectively so the same
   * driver serves both task types; the ParallelNegLogLikelihood.class first
   * constructor argument is the hidden enclosing-instance parameter of the
   * non-static inner task classes.
   */
  private void computeInParallel(double[] x, Class<? extends ComputeTask> taskClass) {
    // NOTE(review): a fresh pool is created and torn down on every call;
    // presumably acceptable since each call does substantial work.
    ExecutorService executor = Executors.newFixedThreadPool(threads);
    int taskSize = numContexts / threads;
    int leftOver = numContexts % threads;
    try {
      Constructor<? extends ComputeTask> cons = taskClass.getConstructor(
          ParallelNegLogLikelihood.class,
          int.class, int.class, int.class, double[].class);
      List<Future<?>> futures = new ArrayList<>();
      for (int i = 0; i < threads; i++) {
        if (i != threads - 1)
          futures.add(executor.submit(
              cons.newInstance(this, i, i * taskSize, taskSize, x)));
        else
          // Last task absorbs the leftover contexts.
          futures.add(executor.submit(
              cons.newInstance(this, i, i * taskSize, taskSize + leftOver, x)));
      }
      for (Future<?> future: futures)
        future.get();
    } catch (Exception e) {
      // NOTE(review): worker failures are only printed here, after which the
      // caller silently consumes whatever partial results were written --
      // consider propagating instead of swallowing.
      e.printStackTrace();
    }
    executor.shutdown();
  }

  /**
   * Task that is computed in parallel. Each task owns a contiguous range of
   * contexts identified by [startIndex, startIndex + length).
   */
  abstract class ComputeTask implements Callable<ComputeTask> {
    final int threadIndex;
    // Start index of contexts to compute
    final int startIndex;
    // Number of contexts to compute
    final int length;
    // Parameter vector shared read-only by all tasks.
    final double[] x;
    public ComputeTask(int threadIndex, int startIndex, int length, double[] x) {
      this.threadIndex = threadIndex;
      this.startIndex = startIndex;
      this.length = length;
      this.x = x;
    }
  }

  /**
   * Task for computing partial value of negative log-likelihood over this
   * task's range of contexts; writes into negLogLikelihoodThread[threadIndex].
   */
  class NegLLComputeTask extends ComputeTask {
    // Task-private scratch buffer (must not share the superclass's tempSums).
    final double[] tempSums;
    public NegLLComputeTask(int threadIndex, int startIndex, int length, double[] x) {
      super(threadIndex, startIndex, length, x);
      this.tempSums = new double[numOutcomes];
    }
    @Override
    public NegLLComputeTask call() {
      int ci, oi, ai, vectorIndex, outcome;
      double predValue, logSumOfExps;
      negLogLikelihoodThread[threadIndex] = 0;
      for (ci = startIndex; ci < startIndex + length; ci++) {
        // Same per-event computation as NegLogLikelihood.valueAt.
        for (oi = 0; oi < numOutcomes; oi++) {
          tempSums[oi] = 0;
          for (ai = 0; ai < contexts[ci].length; ai++) {
            vectorIndex = indexOf(oi, contexts[ci][ai]);
            predValue = values != null ? values[ci][ai] : 1.0;
            tempSums[oi] += predValue * x[vectorIndex];
          }
        }
        logSumOfExps = ArrayMath.logSumOfExps(tempSums);
        outcome = outcomeList[ci];
        negLogLikelihoodThread[threadIndex] -=
            (tempSums[outcome] - logSumOfExps) * numTimesEventsSeen[ci];
      }
      return this;
    }
  }

  /**
   * Task for computing partial gradient over this task's range of contexts;
   * writes into gradientThread[threadIndex].
   */
  class GradientComputeTask extends ComputeTask {
    // Task-private scratch buffer (must not share the superclass's expectation).
    final double[] expectation;
    public GradientComputeTask(int threadIndex, int startIndex, int length, double[] x) {
      super(threadIndex, startIndex, length, x);
      this.expectation = new double[numOutcomes];
    }
    @Override
    public GradientComputeTask call() {
      int ci, oi, ai, vectorIndex;
      double predValue, logSumOfExps;
      int empirical;
      // Reset gradientThread
      Arrays.fill(gradientThread[threadIndex], 0);
      for (ci = startIndex; ci < startIndex + length; ci++) {
        // Same per-event computation as NegLogLikelihood.gradientAt.
        for (oi = 0; oi < numOutcomes; oi++) {
          expectation[oi] = 0;
          for (ai = 0; ai < contexts[ci].length; ai++) {
            vectorIndex = indexOf(oi, contexts[ci][ai]);
            predValue = values != null ? values[ci][ai] : 1.0;
            expectation[oi] += predValue * x[vectorIndex];
          }
        }
        logSumOfExps = ArrayMath.logSumOfExps(expectation);
        for (oi = 0; oi < numOutcomes; oi++) {
          expectation[oi] = Math.exp(expectation[oi] - logSumOfExps);
        }
        for (oi = 0; oi < numOutcomes; oi++) {
          empirical = outcomeList[ci] == oi ? 1 : 0;
          for (ai = 0; ai < contexts[ci].length; ai++) {
            vectorIndex = indexOf(oi, contexts[ci][ai]);
            predValue = values != null ? values[ci][ai] : 1.0;
            gradientThread[threadIndex][vectorIndex] +=
                predValue * (expectation[oi] - empirical) * numTimesEventsSeen[ci];
          }
        }
      }
      return this;
    }
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent/quasinewton/QNMinimizer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.maxent.quasinewton;
import opennlp.tools.ml.maxent.quasinewton.LineSearch.LineSearchResult;
/**
* Implementation of L-BFGS which supports L1-, L2-regularization
* and Elastic Net for solving convex optimization problems. <p>
* Usage example:
* <blockquote><pre>
* // Quadratic function f(x) = (x-1)^2 + 10
* // f obtains its minimum value 10 at x = 1
* Function f = new Function() {
*
* {@literal @}Override
* public int getDimension() {
* return 1;
* }
*
* {@literal @}Override
* public double valueAt(double[] x) {
* return Math.pow(x[0]-1, 2) + 10;
* }
*
* {@literal @}Override
* public double[] gradientAt(double[] x) {
* return new double[] { 2*(x[0]-1) };
* }
*
* };
*
* QNMinimizer minimizer = new QNMinimizer();
* double[] x = minimizer.minimize(f);
* double min = f.valueAt(x);
* </pre></blockquote>
*/
public class QNMinimizer {
// Function change rate tolerance
public static final double CONVERGE_TOLERANCE = 1e-4;
// Relative gradient norm tolerance
public static final double REL_GRAD_NORM_TOL = 1e-4;
// Initial step size
public static final double INITIAL_STEP_SIZE = 1.0;
// Minimum step size
public static final double MIN_STEP_SIZE = 1e-10;
// Default L1-cost
public static final double L1COST_DEFAULT = 0;
// Default L2-cost
public static final double L2COST_DEFAULT = 0;
// Default number of iterations
public static final int NUM_ITERATIONS_DEFAULT = 100;
// Default number of Hessian updates to store
public static final int M_DEFAULT = 15;
// Default maximum number of function evaluations
public static final int MAX_FCT_EVAL_DEFAULT = 30000;
// L1-regularization cost
private double l1Cost;
// L2-regularization cost
private double l2Cost;
// Maximum number of iterations
private int iterations;
// Number of Hessian updates to store
private int m;
// Maximum number of function evaluations
private int maxFctEval;
// Verbose output
private boolean verbose;
// Objective function's dimension
private int dimension;
// Hessian updates
private UpdateInfo updateInfo;
// For evaluating quality of training parameters.
// This is optional and can be omitted.
private Evaluator evaluator;
public QNMinimizer() {
this(L1COST_DEFAULT, L2COST_DEFAULT);
}
public QNMinimizer(double l1Cost, double l2Cost) {
this(l1Cost, l2Cost, NUM_ITERATIONS_DEFAULT);
}
public QNMinimizer(double l1Cost, double l2Cost, int iterations) {
this(l1Cost, l2Cost, iterations, M_DEFAULT, MAX_FCT_EVAL_DEFAULT);
}
public QNMinimizer(double l1Cost, double l2Cost,
int iterations, int m, int maxFctEval) {
this(l1Cost, l2Cost, iterations, m, maxFctEval, true);
}
/**
* Constructor
* @param l1Cost L1-regularization cost
* @param l2Cost L2-regularization cost
* @param iterations maximum number of iterations
* @param m number of Hessian updates to store
* @param maxFctEval maximum number of function evaluations
* @param verbose verbose output
*/
public QNMinimizer(double l1Cost, double l2Cost, int iterations,
int m, int maxFctEval, boolean verbose)
{
// Check arguments
if (l1Cost < 0 || l2Cost < 0)
throw new IllegalArgumentException(
"L1-cost and L2-cost must not be less than zero");
if (iterations <= 0)
throw new IllegalArgumentException(
"Number of iterations must be larger than zero");
if (m <= 0)
throw new IllegalArgumentException(
"Number of Hessian updates must be larger than zero");
if (maxFctEval <= 0)
throw new IllegalArgumentException(
"Maximum number of function evaluations must be larger than zero");
this.l1Cost = l1Cost;
this.l2Cost = l2Cost;
this.iterations = iterations;
this.m = m;
this.maxFctEval = maxFctEval;
this.verbose = verbose;
}
public Evaluator getEvaluator() {
return evaluator;
}
public void setEvaluator(Evaluator evaluator) {
this.evaluator = evaluator;
}
/**
* Find the parameters that minimize the objective function
* @param function objective function
* @return minimizing parameters
*/
public double[] minimize(Function function) {
Function l2RegFunction = new L2RegFunction(function, l2Cost);
this.dimension = l2RegFunction.getDimension();
this.updateInfo = new UpdateInfo(this.m, this.dimension);
// Current point is at the origin
double[] currPoint = new double[dimension];
double currValue = l2RegFunction.valueAt(currPoint);
// Gradient at the current point
double[] currGrad = new double[dimension];
System.arraycopy(l2RegFunction.gradientAt(currPoint), 0,
currGrad, 0, dimension);
// Pseudo-gradient - only use when L1-regularization is enabled
double[] pseudoGrad = null;
if (l1Cost > 0) {
currValue += l1Cost * ArrayMath.l1norm(currPoint);
pseudoGrad = new double[dimension];
computePseudoGrad(currPoint, currGrad, pseudoGrad);
}
LineSearchResult lsr;
if (l1Cost > 0) {
lsr = LineSearchResult.getInitialObjectForL1(
currValue, currGrad, pseudoGrad, currPoint);
} else {
lsr = LineSearchResult.getInitialObject(
currValue, currGrad, currPoint);
}
if (verbose) {
display("\nSolving convex optimization problem.");
display("\nObjective function has " + dimension + " variable(s).");
display("\n\nPerforming " + iterations + " iterations with " +
"L1Cost=" + l1Cost + " and L2Cost=" + l2Cost + "\n");
}
double[] direction = new double[dimension];
long startTime = System.currentTimeMillis();
// Initial step size for the 1st iteration
double initialStepSize = l1Cost > 0 ?
ArrayMath.invL2norm(lsr.getPseudoGradAtNext()) :
ArrayMath.invL2norm(lsr.getGradAtNext());
for (int iter = 1; iter <= iterations; iter++) {
// Find direction
if (l1Cost > 0) {
System.arraycopy(lsr.getPseudoGradAtNext(), 0, direction, 0, direction.length);
} else {
System.arraycopy(lsr.getGradAtNext(), 0, direction, 0, direction.length);
}
computeDirection(direction);
// Line search
if (l1Cost > 0) {
// Constrain the search direction
pseudoGrad = lsr.getPseudoGradAtNext();
for (int i = 0; i < dimension; i++) {
if (direction[i] * pseudoGrad[i] >= 0) {
direction[i] = 0;
}
}
LineSearch.doConstrainedLineSearch(l2RegFunction, direction, lsr, l1Cost, initialStepSize);
computePseudoGrad(lsr.getNextPoint(), lsr.getGradAtNext(), pseudoGrad);
lsr.setPseudoGradAtNext(pseudoGrad);
}
else {
LineSearch.doLineSearch(l2RegFunction, direction, lsr, initialStepSize);
}
// Save Hessian updates
updateInfo.update(lsr);
if (verbose) {
if (iter < 10)
display(" " + iter + ": ");
else if (iter < 100)
display(" " + iter + ": ");
else
display(iter + ": ");
if (evaluator != null) {
display("\t" + lsr.getValueAtNext() + "\t" + lsr.getFuncChangeRate()
+ "\t" + evaluator.evaluate(lsr.getNextPoint()) + "\n");
} else {
display("\t " + lsr.getValueAtNext() +
"\t" + lsr.getFuncChangeRate() + "\n");
}
}
if (isConverged(lsr))
break;
initialStepSize = INITIAL_STEP_SIZE;
}
// Undo L2-shrinkage if Elastic Net is used (since
// in that case, the shrinkage is done twice)
if (l1Cost > 0 && l2Cost > 0) {
double[] x = lsr.getNextPoint();
for (int i = 0; i < dimension; i++) {
x[i] = Math.sqrt(1 + l2Cost) * x[i];
}
}
long endTime = System.currentTimeMillis();
long duration = endTime - startTime;
display("Running time: " + (duration / 1000.) + "s\n");
// Release memory
this.updateInfo = null;
System.gc();
// Avoid returning the reference to LineSearchResult's member so that GC can
// collect memory occupied by lsr after this function completes (is it necessary?)
double[] parameters = new double[dimension];
System.arraycopy(lsr.getNextPoint(), 0, parameters, 0, dimension);
return parameters;
}
/**
* Pseudo-gradient for L1-regularization (see equation 4 in the paper
* "Scalable Training of L1-Regularized Log-Linear Models", Andrew et al. 2007)
*
* @param x current point
* @param g gradient at x
* @param pg pseudo-gradient at x which is to be computed
*/
private void computePseudoGrad(double[] x, double[] g, double[] pg) {
for (int i = 0; i < dimension; i++) {
if (x[i] < 0) {
pg[i] = g[i] - l1Cost;
}
else if (x[i] > 0) {
pg[i] = g[i] + l1Cost;
}
else {
if (g[i] < -l1Cost) {
// right partial derivative
pg[i] = g[i] + l1Cost;
}
else if (g[i] > l1Cost) {
// left partial derivative
pg[i] = g[i] - l1Cost;
}
else {
pg[i] = 0;
}
}
}
}
/**
* L-BFGS two-loop recursion (see Nocedal & Wright 2006, Numerical Optimization, p. 178)
*/
private void computeDirection(double[] direction) {
// Implemented two-loop Hessian update method.
int k = updateInfo.kCounter;
double[] rho = updateInfo.rho;
double[] alpha = updateInfo.alpha; // just to avoid recreating alpha
double[][] S = updateInfo.S;
double[][] Y = updateInfo.Y;
// First loop
for (int i = k - 1; i >= 0; i--) {
alpha[i] = rho[i] * ArrayMath.innerProduct(S[i], direction);
for (int j = 0; j < dimension; j++) {
direction[j] = direction[j] - alpha[i] * Y[i][j];
}
}
// Second loop
for (int i = 0; i < k; i++) {
double beta = rho[i] * ArrayMath.innerProduct(Y[i], direction);
for (int j = 0; j < dimension; j++) {
direction[j] = direction[j] + S[i][j] * (alpha[i] - beta);
}
}
for (int i = 0; i < dimension; i++) {
direction[i] = -direction[i];
}
}
private boolean isConverged(LineSearchResult lsr) {
// Check function's change rate
if (lsr.getFuncChangeRate() < CONVERGE_TOLERANCE) {
if (verbose)
display("Function change rate is smaller than the threshold "
+ CONVERGE_TOLERANCE + ".\nTraining will stop.\n\n");
return true;
}
// Check gradient's norm using the criteria: ||g(x)|| / max(1, ||x||) < threshold
double xNorm = Math.max(1, ArrayMath.l2norm(lsr.getNextPoint()));
double gradNorm = l1Cost > 0 ?
ArrayMath.l2norm(lsr.getPseudoGradAtNext()) : ArrayMath.l2norm(lsr.getGradAtNext());
if (gradNorm / xNorm < REL_GRAD_NORM_TOL) {
if (verbose)
display("Relative L2-norm of the gradient is smaller than the threshold "
+ REL_GRAD_NORM_TOL + ".\nTraining will stop.\n\n");
return true;
}
// Check step size
if (lsr.getStepSize() < MIN_STEP_SIZE) {
if (verbose)
display("Step size is smaller than the minimum step size "
+ MIN_STEP_SIZE + ".\nTraining will stop.\n\n");
return true;
}
// Check number of function evaluations
if (lsr.getFctEvalCount() > this.maxFctEval) {
if (verbose)
display("Maximum number of function evaluations has exceeded the threshold "
+ this.maxFctEval + ".\nTraining will stop.\n\n");
return true;
}
return false;
}
  /**
   * Shorthand for System.out.print
   *
   * <p>Used for progress reporting; most call sites gate it behind the
   * {@code verbose} flag.
   *
   * @param s the text to print (no newline is appended)
   */
  private void display(String s) {
    System.out.print(s);
  }
/**
* Class to store vectors for Hessian approximation update.
*/
private class UpdateInfo {
private double[][] S;
private double[][] Y;
private double[] rho;
private double[] alpha;
private int m;
private int kCounter;
// Constructor
UpdateInfo(int numCorrection, int dimension) {
this.m = numCorrection;
this.kCounter = 0;
S = new double[this.m][dimension];
Y = new double[this.m][dimension];
rho = new double[this.m];
alpha = new double[this.m];
}
public void update(LineSearchResult lsr) {
double[] currPoint = lsr.getCurrPoint();
double[] gradAtCurr = lsr.getGradAtCurr();
double[] nextPoint = lsr.getNextPoint();
double[] gradAtNext = lsr.getGradAtNext();
// Inner product of S_k and Y_k
double SYk = 0.0;
// Add new ones.
if (kCounter < m) {
for (int j = 0; j < dimension; j++) {
S[kCounter][j] = nextPoint[j] - currPoint[j];
Y[kCounter][j] = gradAtNext[j] - gradAtCurr[j];
SYk += S[kCounter][j] * Y[kCounter][j];
}
rho[kCounter] = 1.0 / SYk;
}
else {
// Discard oldest vectors and add new ones.
for (int i = 0; i < m - 1; i++) {
S[i] = S[i + 1];
Y[i] = Y[i + 1];
rho[i] = rho[i + 1];
}
for (int j = 0; j < dimension; j++) {
S[m - 1][j] = nextPoint[j] - currPoint[j];
Y[m - 1][j] = gradAtNext[j] - gradAtCurr[j];
SYk += S[m - 1][j] * Y[m - 1][j];
}
rho[m - 1] = 1.0 / SYk;
}
if (kCounter < m)
kCounter++;
}
}
/**
* L2-regularized objective function
*/
public static class L2RegFunction implements Function {
private Function f;
private double l2Cost;
public L2RegFunction(Function f, double l2Cost) {
this.f = f;
this.l2Cost = l2Cost;
}
@Override
public int getDimension() {
return f.getDimension();
}
@Override
public double valueAt(double[] x) {
checkDimension(x);
double value = f.valueAt(x);
if (l2Cost > 0) {
value += l2Cost * ArrayMath.innerProduct(x, x);
}
return value;
}
@Override
public double[] gradientAt(double[] x) {
checkDimension(x);
double[] gradient = f.gradientAt(x);
if (l2Cost > 0) {
for (int i = 0; i < x.length; i++) {
gradient[i] += 2 * l2Cost * x[i];
}
}
return gradient;
}
private void checkDimension(double[] x) {
if (x.length != getDimension())
throw new IllegalArgumentException(
"x's dimension is not the same as function's dimension");
}
}
/**
* Evaluate quality of training parameters. For example,
* it can be used to report model's training accuracy when
* we train a Maximum Entropy classifier.
*/
public interface Evaluator {
/**
* Measure quality of the training parameters
* @param parameters
* @return evaluated result
*/
double evaluate(double[] parameters);
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent/quasinewton/QNModel.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.maxent.quasinewton;
import opennlp.tools.ml.model.AbstractModel;
import opennlp.tools.ml.model.Context;
/**
 * Maxent model produced by the quasi-Newton (L-BFGS) trainer.
 */
public class QNModel extends AbstractModel {

  public QNModel(Context[] params, String[] predLabels, String[] outcomeNames) {
    super(params, predLabels, outcomeNames);
    this.modelType = ModelType.MaxentQn;
  }

  public int getNumOutcomes() {
    return outcomeNames.length;
  }

  /** Looks up the stored parameters for a predicate name; null if unseen. */
  private Context lookupPred(String predicate) {
    return pmap.get(predicate);
  }

  public double[] eval(String[] context) {
    return eval(context, new double[evalParams.getNumOutcomes()]);
  }

  public double[] eval(String[] context, double[] probs) {
    return eval(context, null, probs);
  }

  public double[] eval(String[] context, float[] values) {
    return eval(context, values, new double[evalParams.getNumOutcomes()]);
  }

  /**
   * Model evaluation which should be used during inference.
   *
   * @param context
   *     The predicates which have been observed at the present
   *     decision point.
   * @param values
   *     Weights of the predicates which have been observed at
   *     the present decision point (null means weight 1.0 each).
   * @param probs
   *     Accumulator for outcome scores; assumed zero-filled by callers.
   * @return Normalized probabilities for the outcomes given the context.
   */
  private double[] eval(String[] context, float[] values, double[] probs) {
    for (int ci = 0; ci < context.length; ci++) {
      Context pred = lookupPred(context[ci]);
      if (pred == null) {
        // Predicate unseen during training: contributes nothing.
        continue;
      }
      double predValue = (values != null) ? values[ci] : 1.0;
      double[] parameters = pred.getParameters();
      int[] outcomes = pred.getOutcomes();
      for (int i = 0; i < outcomes.length; i++) {
        probs[outcomes[i]] += predValue * parameters[i];
      }
    }

    // Softmax via log-sum-exp for numerical stability.
    double logSumExp = ArrayMath.logSumOfExps(probs);
    for (int oi = 0; oi < outcomeNames.length; oi++) {
      probs[oi] = Math.exp(probs[oi] - logSumExp);
    }
    return probs;
  }

  /**
   * Model evaluation which should be used during training to report model accuracy.
   *
   * @param context
   *     Indices of the predicates which have been observed at the present
   *     decision point.
   * @param values
   *     Weights of the predicates which have been observed at
   *     the present decision point (null means weight 1.0 each).
   * @param probs
   *     Accumulator for outcome scores; assumed zero-filled by callers.
   * @param nOutcomes
   *     Number of outcomes
   * @param nPredLabels
   *     Number of unique predicates
   * @param parameters
   *     Model parameters, laid out as [outcome][predicate] row-major
   * @return Normalized probabilities for the outcomes given the context.
   */
  static double[] eval(int[] context, float[] values, double[] probs,
      int nOutcomes, int nPredLabels, double[] parameters) {
    for (int ci = 0; ci < context.length; ci++) {
      int predIdx = context[ci];
      double weight = (values != null) ? values[ci] : 1.0;
      for (int oi = 0; oi < nOutcomes; oi++) {
        probs[oi] += weight * parameters[oi * nPredLabels + predIdx];
      }
    }

    double logSumExp = ArrayMath.logSumOfExps(probs);
    for (int oi = 0; oi < nOutcomes; oi++) {
      probs[oi] = Math.exp(probs[oi] - logSumExp);
    }
    return probs;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/maxent/quasinewton/QNTrainer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.maxent.quasinewton;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import opennlp.tools.ml.AbstractEventTrainer;
import opennlp.tools.ml.maxent.quasinewton.QNMinimizer.Evaluator;
import opennlp.tools.ml.model.AbstractModel;
import opennlp.tools.ml.model.Context;
import opennlp.tools.ml.model.DataIndexer;
import opennlp.tools.util.TrainingParameters;
/**
* Maxent model trainer using L-BFGS algorithm.
*/
/**
 * Maxent model trainer using L-BFGS algorithm.
 *
 * <p>Configuration is taken from {@link TrainingParameters} via {@link #init};
 * the extra constructors exist only for tests and bypass that path.
 */
public class QNTrainer extends AbstractEventTrainer {

  public static final String MAXENT_QN_VALUE = "MAXENT_QN";

  public static final String THREADS_PARAM = "Threads";
  public static final int THREADS_DEFAULT = 1;

  public static final String L1COST_PARAM = "L1Cost";
  public static final double L1COST_DEFAULT = 0.1;

  public static final String L2COST_PARAM = "L2Cost";
  public static final double L2COST_DEFAULT = 0.1;

  // Number of Hessian updates to store
  public static final String M_PARAM = "NumOfUpdates";
  public static final int M_DEFAULT = 15;

  // Maximum number of function evaluations
  public static final String MAX_FCT_EVAL_PARAM = "MaxFctEval";
  public static final int MAX_FCT_EVAL_DEFAULT = 30000;

  // Number of threads
  private int threads;

  // L1-regularization cost
  private double l1Cost;

  // L2-regularization cost
  private double l2Cost;

  // Settings for QNMinimizer
  private int m;           // number of Hessian correction pairs to keep
  private int maxFctEval;  // budget of objective-function evaluations

  public QNTrainer(TrainingParameters parameters) {
    super(parameters);
  }

  // Constructor -- to log. For testing purpose
  public QNTrainer(boolean printMessages) {
    this(M_DEFAULT, printMessages);
  }

  // Constructor -- m : number of hessian updates to store. For testing purpose
  public QNTrainer(int m) {
    this(m, true);
  }

  // Constructor -- to log, number of hessian updates to store. For testing purpose
  public QNTrainer(int m, boolean verbose) {
    this(m, MAX_FCT_EVAL_DEFAULT, verbose);
  }

  // For testing purpose
  // Negative m / maxFctEval silently fall back to the defaults here,
  // unlike init() + validate() which reject them.
  public QNTrainer(int m, int maxFctEval, boolean printMessages) {
    this.printMessages = printMessages;
    this.m = m < 0 ? M_DEFAULT : m;
    this.maxFctEval = maxFctEval < 0 ? MAX_FCT_EVAL_DEFAULT : maxFctEval;
    this.threads = THREADS_DEFAULT;
    this.l1Cost = L1COST_DEFAULT;
    this.l2Cost = L2COST_DEFAULT;
  }

  // >> Members related to AbstractEventTrainer

  public QNTrainer() {
  }

  /**
   * Reads all QN-specific settings from the training parameters,
   * falling back to the defaults declared above.
   */
  @Override
  public void init(TrainingParameters trainingParameters, Map<String, String> reportMap) {
    super.init(trainingParameters,reportMap);
    this.m = trainingParameters.getIntParameter(M_PARAM, M_DEFAULT);
    this.maxFctEval = trainingParameters.getIntParameter(MAX_FCT_EVAL_PARAM, MAX_FCT_EVAL_DEFAULT);
    this.threads = trainingParameters.getIntParameter(THREADS_PARAM, THREADS_DEFAULT);
    this.l1Cost = trainingParameters.getDoubleParameter(L1COST_PARAM, L1COST_DEFAULT);
    this.l2Cost = trainingParameters.getDoubleParameter(L2COST_PARAM, L2COST_DEFAULT);
  }

  @Override
  @Deprecated
  public void init(Map<String, String> trainParams, Map<String, String> reportMap) {
    init(new TrainingParameters(trainParams),reportMap);
  }

  /**
   * Validates the configured settings, throwing
   * {@link IllegalArgumentException} on the first violation found.
   */
  @Override
  public void validate() {
    super.validate();

    String algorithmName = getAlgorithm();
    if (algorithmName != null && !(MAXENT_QN_VALUE.equals(algorithmName))) {
      throw new IllegalArgumentException("algorithmName must be MAXENT_QN");
    }

    // Number of Hessian updates to remember
    if (m < 0) {
      throw new IllegalArgumentException(
          "Number of Hessian updates to remember must be >= 0");
    }

    // Maximum number of function evaluations
    if (maxFctEval < 0) {
      throw new IllegalArgumentException(
          "Maximum number of function evaluations must be >= 0");
    }

    // Number of threads must be >= 1
    if (threads < 1) {
      throw new IllegalArgumentException("Number of threads must be >= 1");
    }

    // Regularization costs must be >= 0
    if (l1Cost < 0) {
      throw new IllegalArgumentException("Regularization costs must be >= 0");
    }
    if (l2Cost < 0) {
      throw new IllegalArgumentException("Regularization costs must be >= 0");
    }
  }

  @Deprecated
  @Override
  public boolean isValid() {
    try {
      validate();
      return true;
    }
    catch (IllegalArgumentException e) {
      return false;
    }
  }

  public boolean isSortAndMerge() {
    return true;
  }

  public AbstractModel doTrain(DataIndexer indexer) throws IOException {
    int iterations = getIterations();
    return trainModel(iterations, indexer);
  }

  // << Members related to AbstractEventTrainer

  /**
   * Trains model parameters with L-BFGS (via {@link QNMinimizer}) and packs
   * them into a {@link QNModel}. Parameters are laid out row-major as
   * [outcome][predicate], matching {@code QNModel.eval}.
   *
   * @param iterations maximum number of L-BFGS iterations
   * @param indexer    indexed training events
   * @return the trained model
   */
  public QNModel trainModel(int iterations, DataIndexer indexer) {
    // Train model's parameters
    Function objectiveFunction;
    if (threads == 1) {
      System.out.println("Computing model parameters ...");
      objectiveFunction = new NegLogLikelihood(indexer);
    } else {
      System.out.println("Computing model parameters in " + threads + " threads ...");
      objectiveFunction = new ParallelNegLogLikelihood(indexer, threads);
    }

    QNMinimizer minimizer = new QNMinimizer(
        l1Cost, l2Cost, iterations, m, maxFctEval, printMessages);
    minimizer.setEvaluator(new ModelEvaluator(indexer));

    double[] parameters = minimizer.minimize(objectiveFunction);

    // Construct model with trained parameters
    String[] predLabels = indexer.getPredLabels();
    int nPredLabels = predLabels.length;

    String[] outcomeNames = indexer.getOutcomeLabels();
    int nOutcomes = outcomeNames.length;

    Context[] params = new Context[nPredLabels];
    for (int ci = 0; ci < params.length; ci++) {
      List<Integer> outcomePattern = new ArrayList<>(nOutcomes);
      List<Double> alpha = new ArrayList<>(nOutcomes);
      for (int oi = 0; oi < nOutcomes; oi++) {
        double val = parameters[oi * nPredLabels + ci];
        outcomePattern.add(oi);
        alpha.add(val);
      }
      params[ci] = new Context(ArrayMath.toIntArray(outcomePattern),
          ArrayMath.toDoubleArray(alpha));
    }

    return new QNModel(params, predLabels, outcomeNames);
  }

  /**
   * For measuring model's training accuracy
   */
  private static class ModelEvaluator implements Evaluator {

    private DataIndexer indexer;

    public ModelEvaluator(DataIndexer indexer) {
      this.indexer = indexer;
    }

    /**
     * Evaluate the current model on training data set
     *
     * @return model's training accuracy (events weighted by how often they were seen)
     */
    @Override
    public double evaluate(double[] parameters) {
      int[][] contexts  = indexer.getContexts();
      float[][] values  = indexer.getValues();
      int[] nEventsSeen = indexer.getNumTimesEventsSeen();
      int[] outcomeList = indexer.getOutcomeList();
      int nOutcomes     = indexer.getOutcomeLabels().length;
      int nPredLabels   = indexer.getPredLabels().length;

      int nCorrect     = 0;
      int nTotalEvents = 0;

      for (int ei = 0; ei < contexts.length; ei++) {
        int[] context  = contexts[ei];
        float[] value  = values == null ? null : values[ei];

        double[] probs = new double[nOutcomes];
        QNModel.eval(context, value, probs, nOutcomes, nPredLabels, parameters);
        int outcome = ArrayMath.maxIdx(probs);
        if (outcome == outcomeList[ei]) {
          nCorrect += nEventsSeen[ei];
        }
        nTotalEvents += nEventsSeen[ei];
      }

      return (double) nCorrect / nTotalEvents;
    }
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/AbstractDataIndexer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import opennlp.tools.ml.AbstractTrainer;
import opennlp.tools.util.InsufficientTrainingDataException;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.TrainingParameters;
/**
* Abstract class for collecting event and context counts used in training.
*
*/
/**
 * Abstract class for collecting event and context counts used in training.
 *
 * <p>Subclasses call {@link #index} and {@link #sortAndMerge} to build the
 * integer-encoded event/predicate arrays exposed by the getters below.
 */
public abstract class AbstractDataIndexer implements DataIndexer {

  public static final String CUTOFF_PARAM = AbstractTrainer.CUTOFF_PARAM;
  public static final int CUTOFF_DEFAULT = AbstractTrainer.CUTOFF_DEFAULT;

  public static final String SORT_PARAM = "sort";
  public static final boolean SORT_DEFAULT = true;

  protected TrainingParameters trainingParameters;
  protected Map<String,String> reportMap;
  protected boolean printMessages;

  /**
   * Initializes this indexer from the given parameters.
   *
   * <p>Bug fix: the original code did {@code reportMap = new HashMap<>()}
   * when the field was null — assigning the method parameter (a dead store),
   * so {@code this.reportMap} stayed null. We now assign the field.
   *
   * @param indexingParameters training/indexing parameters (must not be null)
   * @param reportMap          map that receives indexing report entries;
   *                           a fresh empty map is used when null
   */
  public void init(TrainingParameters indexingParameters,Map<String, String> reportMap) {
    this.reportMap = reportMap != null ? reportMap : new HashMap<>();
    trainingParameters = indexingParameters;
    printMessages = trainingParameters.getBooleanParameter(AbstractTrainer.VERBOSE_PARAM,
        AbstractTrainer.VERBOSE_DEFAULT);
  }

  // Total number of events read (before duplicate merging).
  private int numEvents;

  /** The integer contexts associated with each unique event. */
  protected int[][] contexts;

  /** The integer outcome associated with each unique event. */
  protected int[] outcomeList;

  /** The number of times an event occured in the training data. */
  protected int[] numTimesEventsSeen;

  /** The predicate/context names. */
  protected String[] predLabels;

  /** The names of the outcomes. */
  protected String[] outcomeLabels;

  /** The number of times each predicate occured. */
  protected int[] predCounts;

  public int[][] getContexts() {
    return contexts;
  }

  public int[] getNumTimesEventsSeen() {
    return numTimesEventsSeen;
  }

  public int[] getOutcomeList() {
    return outcomeList;
  }

  public String[] getPredLabels() {
    return predLabels;
  }

  public String[] getOutcomeLabels() {
    return outcomeLabels;
  }

  public int[] getPredCounts() {
    return predCounts;
  }

  /**
   * Sorts and uniques the array of comparable events and return the number of unique events.
   * This method will alter the eventsToCompare array -- it does an in place
   * sort, followed by an in place edit to remove duplicates.
   *
   * @param eventsToCompare a <code>ComparableEvent[]</code> value
   * @param sort whether to sort and merge duplicates; when false the list is
   *             taken as-is and every event is treated as unique
   * @return The number of unique events in the specified list.
   * @throws InsufficientTrainingDataException if not enough events are provided
   * @since maxent 1.2.6
   */
  protected int sortAndMerge(List<ComparableEvent> eventsToCompare, boolean sort)
      throws InsufficientTrainingDataException {
    int numUniqueEvents = 1;
    numEvents = eventsToCompare.size();
    if (sort && eventsToCompare.size() > 0) {
      Collections.sort(eventsToCompare);
      ComparableEvent ce = eventsToCompare.get(0);
      for (int i = 1; i < numEvents; i++) {
        ComparableEvent ce2 = eventsToCompare.get(i);

        if (ce.compareTo(ce2) == 0) {
          ce.seen++; // increment the seen count
          eventsToCompare.set(i, null); // kill the duplicate
        }
        else {
          ce = ce2; // a new champion emerges...
          numUniqueEvents++; // increment the # of unique events
        }
      }
    }
    else {
      numUniqueEvents = eventsToCompare.size();
    }
    if (numUniqueEvents == 0) {
      throw new InsufficientTrainingDataException("Insufficient training data to create model.");
    }
    if (sort) display("done. Reduced " + numEvents + " events to " + numUniqueEvents + ".\n");

    contexts = new int[numUniqueEvents][];
    outcomeList = new int[numUniqueEvents];
    numTimesEventsSeen = new int[numUniqueEvents];

    // Compact the surviving (non-null) events into the output arrays.
    for (int i = 0, j = 0; i < numEvents; i++) {
      ComparableEvent evt = eventsToCompare.get(i);
      if (null == evt) {
        continue; // this was a dupe, skip over it.
      }
      numTimesEventsSeen[j] = evt.seen;
      outcomeList[j] = evt.outcome;
      contexts[j] = evt.predIndexes;
      ++j;
    }
    return numUniqueEvents;
  }

  /**
   * Reads all events from the stream, mapping outcomes and predicates to
   * integer ids. Events whose every predicate is absent from
   * {@code predicateIndex} (e.g. cut off) are dropped with a message.
   *
   * <p>Side effect: populates {@link #outcomeLabels} and {@link #predLabels}.
   *
   * @param events         stream of training events
   * @param predicateIndex predicate name to id mapping
   * @return the encoded events, ready for {@link #sortAndMerge}
   * @throws IOException if the stream cannot be read
   */
  protected List<ComparableEvent> index(ObjectStream<Event> events,
      Map<String, Integer> predicateIndex) throws IOException {
    Map<String, Integer> omap = new HashMap<>();
    List<ComparableEvent> eventsToCompare = new ArrayList<>();

    Event ev;
    while ((ev = events.read()) != null) {
      omap.putIfAbsent(ev.getOutcome(), omap.size());
      int[] cons = Arrays.stream(ev.getContext())
          .map(pred -> predicateIndex.get(pred))
          .filter(Objects::nonNull)
          .mapToInt(i -> i).toArray();

      // drop events with no active features
      if (cons.length > 0) {
        int ocID = omap.get(ev.getOutcome());
        eventsToCompare.add(new ComparableEvent(ocID, cons, ev.getValues()));
      } else {
        display("Dropped event " + ev.getOutcome() + ":"
            + Arrays.asList(ev.getContext()) + "\n");
      }
    }

    outcomeLabels = toIndexedStringArray(omap);
    predLabels = toIndexedStringArray(predicateIndex);
    return eventsToCompare;
  }

  public int getNumEvents() {
    return numEvents;
  }

  /**
   * Updates the set of predicated and counter with the specified event contexts and cutoff.
   * @param ec The contexts/features which occur in a event.
   * @param predicateSet The set of predicates which will be used for model building.
   * @param counter The predicate counters.
   * @param cutoff The cutoff which determines whether a predicate is included.
   * @deprecated will be removed after 1.8.1 release
   */
  @Deprecated
  protected static void update(String[] ec, Set<String> predicateSet,
      Map<String,Integer> counter, int cutoff) {
    for (String s : ec) {
      counter.merge(s, 1, (value, one) -> value + one);
      if (!predicateSet.contains(s) && counter.get(s) >= cutoff) {
        predicateSet.add(s);
      }
    }
  }

  /**
   * Updates the set of predicated and counter with the specified event contexts.
   * @param ec The contexts/features which occur in a event.
   * @param counter The predicate counters.
   */
  protected static void update(String[] ec, Map<String,Integer> counter) {
    for (String s : ec) {
      counter.merge(s, 1, (value, one) -> value + one);
    }
  }

  /**
   * Utility method for creating a String[] array from a map whose
   * keys are labels (Strings) to be stored in the array and whose
   * values are the indices (Integers) at which the corresponding
   * labels should be inserted.
   *
   * @param labelToIndexMap a <code>TObjectIntHashMap</code> value
   * @return a <code>String[]</code> value
   */
  protected static String[] toIndexedStringArray(Map<String, Integer> labelToIndexMap) {
    return labelToIndexMap.entrySet().stream().sorted(Comparator.comparingInt(Map.Entry::getValue))
        .map(Map.Entry::getKey).toArray(String[]::new);
  }

  /**
   * Returns per-event feature weights, or null when events are unweighted.
   * Subclasses that support real-valued features override this.
   */
  public float[][] getValues() {
    return null;
  }

  /** Prints {@code s} when verbose messages are enabled. */
  protected void display(String s) {
    if (printMessages) {
      System.out.print(s);
    }
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/AbstractModel.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import java.text.DecimalFormat;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
/**
 * Base class for maxent-style models: maps predicate names to per-outcome
 * parameters and exposes outcome bookkeeping shared by all model types.
 */
public abstract class AbstractModel implements MaxentModel {

  /** Mapping between predicates/contexts and an integer representing them. */
  protected Map<String, Context> pmap;

  /** The names of the outcomes. */
  protected String[] outcomeNames;

  /** Parameters for the model. */
  protected EvalParameters evalParams;

  /** Prior distribution for this model. */
  protected Prior prior;

  public enum ModelType { Maxent,Perceptron,MaxentQn,NaiveBayes }

  /** The type of the model. */
  protected ModelType modelType;

  // NOTE(review): predLabels is unused here — the mapping is taken from the
  // supplied pmap directly. Kept for signature compatibility.
  protected AbstractModel(Context[] params, String[] predLabels,
      Map<String, Context> pmap, String[] outcomeNames) {
    this.pmap = pmap;
    this.outcomeNames = outcomeNames;
    this.evalParams = new EvalParameters(params,outcomeNames.length);
  }

  public AbstractModel(Context[] params, String[] predLabels, String[] outcomeNames) {
    init(predLabels, params, outcomeNames);
    this.evalParams = new EvalParameters(params, outcomeNames.length);
  }

  /** Builds the predicate-to-parameters map; params[i] belongs to predLabels[i]. */
  private void init(String[] predLabels, Context[] params, String[] outcomeNames) {
    this.pmap = new HashMap<>(predLabels.length);

    for (int i = 0; i < predLabels.length; i++) {
      pmap.put(predLabels[i], params[i]);
    }

    this.outcomeNames = outcomeNames;
  }

  /**
   * Return the name of the outcome corresponding to the highest likelihood
   * in the parameter ocs.
   *
   * @param ocs A double[] as returned by the eval(String[] context)
   *            method.
   * @return The name of the most likely outcome.
   */
  public final String getBestOutcome(double[] ocs) {
    int best = 0;
    for (int i = 1; i < ocs.length; i++) {
      if (ocs[i] > ocs[best]) {
        best = i;
      }
    }
    return outcomeNames[best];
  }

  public ModelType getModelType() {
    return modelType;
  }

  /**
   * Return a string matching all the outcome names with all the
   * probabilities produced by the <code>eval(String[] context)</code>
   * method.
   *
   * @param ocs A <code>double[]</code> as returned by the
   *            <code>eval(String[] context)</code>
   *            method.
   * @return String containing outcome names paired with the normalized
   *         probability (contained in the <code>double[] ocs</code>)
   *         for each one.
   */
  public final String getAllOutcomes(double[] ocs) {
    if (ocs.length != outcomeNames.length) {
      return "The double array sent as a parameter to GISModel.getAllOutcomes() " +
          "must not have been produced by this model.";
    }
    else {
      DecimalFormat df = new DecimalFormat("0.0000");
      StringBuilder sb = new StringBuilder(ocs.length * 2);
      sb.append(outcomeNames[0]).append("[").append(df.format(ocs[0])).append("]");
      for (int i = 1; i < ocs.length; i++) {
        sb.append(" ").append(outcomeNames[i]).append("[").append(df.format(ocs[i])).append("]");
      }
      return sb.toString();
    }
  }

  /**
   * Return the name of an outcome corresponding to an int id.
   *
   * @param i An outcome id.
   * @return The name of the outcome associated with that id.
   */
  public final String getOutcome(int i) {
    return outcomeNames[i];
  }

  /**
   * Gets the index associated with the String name of the given outcome.
   *
   * @param outcome the String name of the outcome for which the
   *          index is desired
   * @return the index if the given outcome label exists for this
   *         model, -1 if it does not.
   **/
  public int getIndex(String outcome) {
    for (int i = 0; i < outcomeNames.length; i++) {
      if (outcomeNames[i].equals(outcome))
        return i;
    }
    return -1;
  }

  public int getNumOutcomes() {
    return evalParams.getNumOutcomes();
  }

  /**
   * Provides the fundamental data structures which encode the maxent model
   * information. This method will usually only be needed by
   * GISModelWriters. The following values are held in the Object array
   * which is returned by this method:
   * <ul>
   * <li>index 0: opennlp.tools.ml.maxent.Context[] containing the model
   *            parameters
   * <li>index 1: java.util.Map containing the mapping of model predicates
   *            to unique integers
   * <li>index 2: java.lang.String[] containing the names of the outcomes,
   *            stored in the index of the array which represents their
   *            unique ids in the model.
   * </ul>
   *
   * @return An Object[] with the values as described above.
   */
  public final Object[] getDataStructures() {
    Object[] data = new Object[3];
    data[0] = evalParams.getParams();
    data[1] = pmap;
    data[2] = outcomeNames;
    return data;
  }

  @Override
  public int hashCode() {
    // Bug fix: the hash previously also mixed in evalParams, which equals()
    // never compares — two equal models could hash differently, violating
    // the equals/hashCode contract. Hash only the fields equals() uses.
    return Objects.hash(pmap, Arrays.hashCode(outcomeNames), prior);
  }

  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }

    if (obj instanceof AbstractModel) {
      AbstractModel model = (AbstractModel) obj;

      return pmap.equals(model.pmap) && Objects.deepEquals(outcomeNames, model.outcomeNames)
          && Objects.equals(prior, model.prior);
    }

    return false;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/AbstractModelReader.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.StringTokenizer;
import java.util.zip.GZIPInputStream;
import org.apache.commons.lang3.StringUtils;
import ai.idylnlp.opennlp.custom.encryption.OpenNLPEncryptionFactory;
/**
 * Base class for model readers. Handles the common stream setup and the
 * shared sections of a model file (outcomes, outcome patterns, predicates and
 * parameters); subclasses supply the format check and model construction.
 */
public abstract class AbstractModelReader {

  /**
   * The number of predicates contained in the model.
   */
  protected int NUM_PREDS;

  protected DataReader dataReader;

  public AbstractModelReader(File f) throws IOException {
    String name = f.getName();
    InputStream in;
    // Transparently decompress ".gz" files and strip the suffix so the
    // format dispatch below sees the underlying extension.
    if (name.endsWith(".gz")) {
      in = new GZIPInputStream(new FileInputStream(f));
      name = name.substring(0, name.length() - 3);
    } else {
      in = new FileInputStream(f);
    }
    // ".bin" selects the binary reader; anything else is treated as text.
    this.dataReader = name.endsWith(".bin")
        ? new BinaryFileDataReader(in)
        : new PlainTextFileDataReader(in);
  }

  public AbstractModelReader(DataReader dataReader) {
    super();
    this.dataReader = dataReader;
  }

  /**
   * Implement as needed for the format the model is stored in.
   */
  public int readInt() throws java.io.IOException {
    return dataReader.readInt();
  }

  /**
   * Implement as needed for the format the model is stored in.
   */
  public double readDouble() throws java.io.IOException {
    return dataReader.readDouble();
  }

  /**
   * Reads a string from the underlying reader, decrypting it when an
   * encryption key has been configured on the default encryption factory.
   */
  public String readUTF() throws java.io.IOException {
    String s = dataReader.readUTF();
    if (StringUtils.isNotEmpty(OpenNLPEncryptionFactory.getDefault().getKey())) {
      try {
        return OpenNLPEncryptionFactory.getDefault().decrypt(s);
      } catch (Exception ex) {
        throw new RuntimeException("Unable to load the model. Verify that your license key and encryption key are correct.", ex);
      }
    }
    return s;
  }

  /** Verifies the model type, then builds and returns the model. */
  public AbstractModel getModel() throws IOException {
    checkModelType();
    return constructModel();
  }

  public abstract void checkModelType() throws java.io.IOException;

  public abstract AbstractModel constructModel() throws java.io.IOException;

  /** Reads the outcome label section: a count followed by that many labels. */
  protected String[] getOutcomes() throws java.io.IOException {
    int count = readInt();
    String[] labels = new String[count];
    for (int idx = 0; idx < count; idx++) {
      labels[idx] = readUTF();
    }
    return labels;
  }

  /** Reads the outcome pattern section; each entry is a space-separated int list. */
  protected int[][] getOutcomePatterns() throws java.io.IOException {
    int patternCount = readInt();
    int[][] patterns = new int[patternCount][];
    for (int p = 0; p < patternCount; p++) {
      StringTokenizer tok = new StringTokenizer(readUTF(), " ");
      int[] ints = new int[tok.countTokens()];
      int pos = 0;
      while (tok.hasMoreTokens()) {
        ints[pos++] = Integer.parseInt(tok.nextToken());
      }
      patterns[p] = ints;
    }
    return patterns;
  }

  /** Reads the predicate label section and records the count in NUM_PREDS. */
  protected String[] getPredicates() throws java.io.IOException {
    NUM_PREDS = readInt();
    String[] labels = new String[NUM_PREDS];
    for (int idx = 0; idx < NUM_PREDS; idx++) {
      labels[idx] = readUTF();
    }
    return labels;
  }

  /**
   * Reads the parameters from a file and populates an array of context objects.
   * @param outcomePatterns The outcomes patterns for the model. The first index refers to which
   *     outcome pattern (a set of outcomes that occurs with a context) is being specified. The
   *     second index specifies the number of contexts which use this pattern at index 0, and the
   *     index of each outcomes which make up this pattern in indicies 1-n.
   * @return An array of context objects.
   * @throws java.io.IOException when the model file does not match the outcome patterns or can not be read.
   */
  protected Context[] getParameters(int[][] outcomePatterns) throws java.io.IOException {
    Context[] params = new Context[NUM_PREDS];
    int pid = 0;
    for (int[] pattern : outcomePatterns) {
      // pattern[0] is the number of contexts sharing this pattern;
      // pattern[1..n] are the outcome indexes that make it up.
      int[] outcomes = new int[pattern.length - 1];
      System.arraycopy(pattern, 1, outcomes, 0, pattern.length - 1);
      for (int j = 0; j < pattern[0]; j++) {
        double[] contextParameters = new double[pattern.length - 1];
        for (int k = 1; k < pattern.length; k++) {
          contextParameters[k - 1] = readDouble();
        }
        params[pid++] = new Context(outcomes, contextParameters);
      }
    }
    return params;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/AbstractModelWriter.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
/**
 * Abstract base class for writers which persist a trained model; subclasses
 * define the concrete output format and destination.
 */
public abstract class AbstractModelWriter {
  public AbstractModelWriter() {
    super();
  }
  /** Writes a string value in the target format. */
  public abstract void writeUTF(String s) throws java.io.IOException;
  /** Writes an int value in the target format. */
  public abstract void writeInt(int i) throws java.io.IOException;
  /** Writes a double value in the target format. */
  public abstract void writeDouble(double d) throws java.io.IOException;
  /** Releases any resources held by the writer. */
  public abstract void close() throws java.io.IOException;
  /** Writes the model out; destination and exact layout are subclass-defined. */
  public abstract void persist() throws java.io.IOException;
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/BinaryFileDataReader.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.GZIPInputStream;
public class BinaryFileDataReader implements DataReader {
private DataInputStream input;
public BinaryFileDataReader(File f) throws IOException {
if (f.getName().endsWith(".gz")) {
input = new DataInputStream(new BufferedInputStream(
new GZIPInputStream(new BufferedInputStream(new FileInputStream(f)))));
}
else {
input = new DataInputStream(new BufferedInputStream(new FileInputStream(f)));
}
}
public BinaryFileDataReader(InputStream in) {
input = new DataInputStream(in);
}
public BinaryFileDataReader(DataInputStream in) {
input = in;
}
public double readDouble() throws IOException {
return input.readDouble();
}
public int readInt() throws IOException {
return input.readInt();
}
public String readUTF() throws IOException {
return input.readUTF();
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/ComparableEvent.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import java.util.Arrays;
import java.util.Objects;
/**
* A maxent event representation which we can use to sort based on the
* predicates indexes contained in the events.
*/
public class ComparableEvent implements Comparable<ComparableEvent> {
public int outcome;
public int[] predIndexes;
public int seen = 1; // the number of times this event has been seen.
public float[] values;
public ComparableEvent(int oc, int[] pids, float[] values) {
outcome = oc;
this.values = values;
predIndexes = pids;
}
public ComparableEvent(int oc, int[] pids) {
this(oc, pids, null);
}
public int compareTo(ComparableEvent ce) {
int compareOutcome = Integer.compare(outcome, ce.outcome);
if (compareOutcome != 0) {
return compareOutcome;
}
int smallerLength = Math.min(predIndexes.length, ce.predIndexes.length);
for (int i = 0; i < smallerLength; i++) {
int comparePredIndexes = Integer.compare(predIndexes[i], ce.predIndexes[i]);
if (comparePredIndexes != 0) {
return comparePredIndexes;
}
if (values != null && ce.values != null) {
float compareValues = Float.compare(values[i], ce.values[i]);
if (!Float.valueOf(compareValues).equals(Float.valueOf(0.0f))) {
return (int) compareValues;
}
} else if (values != null) {
float compareValues = Float.compare(values[i], 1.0f);
if (!Float.valueOf(compareValues).equals(Float.valueOf(0.0f))) {
return (int) compareValues;
}
} else if (ce.values != null) {
float compareValues = Float.compare(1.0f, ce.values[i]);
if (!Float.valueOf(compareValues).equals(Float.valueOf(0.0f))) {
return (int) compareValues;
}
}
}
int comparePredIndexesLength = Integer.compare(predIndexes.length, ce.predIndexes.length);
if (comparePredIndexesLength != 0) {
return comparePredIndexesLength;
}
return 0;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj instanceof ComparableEvent) {
ComparableEvent other = (ComparableEvent) obj;
return outcome == other.outcome &&
Arrays.equals(predIndexes, other.predIndexes) &&
seen == other.seen &&
Arrays.equals(values, other.values);
}
return false;
}
@Override
public int hashCode() {
return Objects.hash(outcome, Arrays.hashCode(predIndexes), seen, Arrays.hashCode(values));
}
public String toString() {
StringBuilder s = new StringBuilder().append(outcome).append(":");
for (int i = 0; i < predIndexes.length; i++) {
s.append(" ").append(predIndexes[i]);
if (values != null) {
s.append("=").append(values[i]);
}
}
return s.toString();
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/ComparablePredicate.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import java.util.Arrays;
import java.util.Objects;
/**
 * A maxent predicate representation which we can use to sort based on the
 * outcomes. This allows us to make the mapping of features to their parameters
 * much more compact.
 */
public class ComparablePredicate implements Comparable<ComparablePredicate> {
  public String name;
  public int[] outcomes;
  public double[] params;

  public ComparablePredicate(String n, int[] ocs, double[] ps) {
    name = n;
    outcomes = ocs;
    params = ps;
  }

  /**
   * Orders predicates solely by their outcome arrays: element-wise over the
   * shared prefix, then by length. The name and params fields play no part,
   * so this ordering is not consistent with equals.
   */
  public int compareTo(ComparablePredicate cp) {
    int shared = Math.min(outcomes.length, cp.outcomes.length);
    int i = 0;
    while (i < shared) {
      int diff = Integer.compare(outcomes[i], cp.outcomes[i]);
      if (diff != 0) {
        return diff;
      }
      i++;
    }
    return Integer.compare(outcomes.length, cp.outcomes.length);
  }

  @Override
  public int hashCode() {
    return Objects.hash(name, Arrays.hashCode(outcomes), Arrays.hashCode(params));
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj)
      return true;
    if (!(obj instanceof ComparablePredicate)) {
      return false;
    }
    ComparablePredicate that = (ComparablePredicate) obj;
    return Objects.equals(name, that.name)
        && Arrays.equals(outcomes, that.outcomes)
        && Arrays.equals(params, that.params);
  }

  /** Renders the outcomes as " o1 o2 ..." (leading space preserved). */
  public String toString() {
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < outcomes.length; i++) {
      sb.append(" ").append(outcomes[i]);
    }
    return sb.toString();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/Context.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import java.util.Arrays;
import java.util.Objects;
/**
 * Class which associates a real valued parameter or expected value with a particular contextual
 * predicate or feature. This is used to store maxent model parameters as well as model and
 * empirical expected values.
 */
public class Context {

  /** The real valued parameters or expected values for this context. */
  protected double[] parameters;

  /** The outcomes which occur with this context. */
  protected int[] outcomes;

  /**
   * Creates a new parameters object with the specified parameters associated with the specified
   * outcome pattern.
   *
   * @param outcomePattern Array of outcomes for which parameters exists for this context.
   * @param parameters Parameters for the outcomes specified.
   */
  public Context(int[] outcomePattern, double[] parameters) {
    this.outcomes = outcomePattern;
    this.parameters = parameters;
  }

  /** @return Array of outcomes for which parameters exists for this context. */
  public int[] getOutcomes() {
    return outcomes;
  }

  /** @return Array of parameters for the outcomes of this context. */
  public double[] getParameters() {
    return parameters;
  }

  @Override
  public int hashCode() {
    return Objects.hash(Arrays.hashCode(parameters), Arrays.hashCode(outcomes));
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (!(obj instanceof Context)) {
      return false;
    }
    Context that = (Context) obj;
    return Arrays.equals(parameters, that.parameters)
        && Arrays.equals(outcomes, that.outcomes);
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/DataIndexer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import java.io.IOException;
import java.util.Map;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.TrainingParameters;
/**
 * Object which compresses events in memory and performs feature selection.
 * Implementations must be initialized via {@link #init} before {@link #index}
 * is called.
 */
public interface DataIndexer {
  /**
   * Returns the array of predicates seen in each event.
   * @return a 2-D array whose first dimension is the event index and array this refers to contains
   *     the contexts for that event.
   */
  int[][] getContexts();
  /**
   * Returns an array indicating the number of times a particular event was seen.
   * @return an array indexed by the event index indicating the number of times a particular event was seen.
   */
  int[] getNumTimesEventsSeen();
  /**
   * Returns an array indicating the outcome index for each event.
   * @return an array indicating the outcome index for each event.
   */
  int[] getOutcomeList();
  /**
   * Returns an array of predicate/context names.
   * @return an array of predicate/context names indexed by context index. These indices are the
   *     value of the array returned by <code>getContexts</code>.
   */
  String[] getPredLabels();
  /**
   * Returns an array of the count of each predicate in the events.
   * @return an array of the count of each predicate in the events.
   */
  int[] getPredCounts();
  /**
   * Returns an array of outcome names.
   * @return an array of outcome names indexed by outcome index.
   */
  String[] getOutcomeLabels();
  /**
   * Returns the values associated with each event context or null if integer values are to be used.
   * @return the values associated with each event context.
   */
  float[][] getValues();
  /**
   * Returns the number of total events indexed.
   * @return The number of total events indexed.
   */
  int getNumEvents();
  /**
   * Sets parameters used during the data indexing.
   * @param trainParams {@link TrainingParameters}
   * @param reportMap a map to which indexing report entries are written
   */
  void init(TrainingParameters trainParams,Map<String,String> reportMap);
  /**
   * Performs the data indexing. Make sure the init(...) method is called first.
   *
   * @param eventStream {@link ObjectStream<Event>} supplying the events to index
   */
  void index(ObjectStream<Event> eventStream) throws IOException;
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/DataIndexerFactory.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import java.util.HashMap;
import java.util.Map;
import opennlp.tools.ml.AbstractEventTrainer;
import opennlp.tools.util.TrainingParameters;
import opennlp.tools.util.ext.ExtensionLoader;
public class DataIndexerFactory {
  /**
   * Creates and initializes the {@link DataIndexer} named by the
   * {@code DATA_INDEXER_PARAM} training parameter (two-pass by default).
   * Any unrecognized value is treated as an implementation class name and
   * loaded through the extension mechanism.
   *
   * @param parameters the training parameters, consulted for the indexer choice
   * @param reportMap receives indexing report entries; a fresh map is created when null
   * @return an initialized indexer, ready for indexing
   */
  public static DataIndexer getDataIndexer(TrainingParameters parameters, Map<String, String> reportMap) {
    // The default is currently a 2-Pass data indexer.
    String choice = parameters.getStringParameter(AbstractEventTrainer.DATA_INDEXER_PARAM,
        AbstractEventTrainer.DATA_INDEXER_TWO_PASS_VALUE);
    // Callers may supply their own report map; otherwise create a private one.
    Map<String, String> report = (reportMap == null) ? new HashMap<>() : reportMap;
    DataIndexer indexer;
    switch (choice) {
      case AbstractEventTrainer.DATA_INDEXER_ONE_PASS_VALUE:
        indexer = new OnePassDataIndexer();
        break;
      case AbstractEventTrainer.DATA_INDEXER_TWO_PASS_VALUE:
        indexer = new TwoPassDataIndexer();
        break;
      case AbstractEventTrainer.DATA_INDEXER_ONE_PASS_REAL_VALUE:
        indexer = new OnePassRealValueDataIndexer();
        break;
      default:
        // Treat anything else as a user-supplied implementation class name.
        indexer = ExtensionLoader.instantiateExtension(DataIndexer.class, choice);
    }
    indexer.init(parameters, report);
    return indexer;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/DataReader.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import java.io.IOException;
/**
 * Abstraction over the primitive read operations needed to load a model,
 * independent of the underlying encoding (binary or plain text).
 */
public interface DataReader {

  /** Reads the next double value. */
  double readDouble() throws IOException;

  /** Reads the next int value. */
  int readInt() throws IOException;

  /** Reads the next string value. */
  String readUTF() throws IOException;
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/DynamicEvalParameters.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import java.util.List;
public class DynamicEvalParameters {

  /** Mapping between outcomes and parameter values for each context.
   * The integer representation of the context can be found using <code>pmap</code>. */
  private final List<? extends Context> params;

  /** The number of outcomes being predicted. */
  private final int numOutcomes;

  /**
   * Creates a set of parameters which can be evaluated with the eval method.
   *
   * @param params The parameters of the model.
   * @param numOutcomes The number of outcomes.
   */
  public DynamicEvalParameters(List<? extends Context> params, int numOutcomes) {
    this.params = params;
    this.numOutcomes = numOutcomes;
  }

  /**
   * Returns the parameters as an array.
   *
   * @return a freshly allocated array holding the current contexts.
   */
  public Context[] getParams() {
    // Passing a zero-length array is the idiomatic form (the collection
    // allocates the correctly-sized result itself) and avoids the
    // size()/toArray race on concurrently modified lists.
    return params.toArray(new Context[0]);
  }

  public int getNumOutcomes() {
    return numOutcomes;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/EvalParameters.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import java.util.Arrays;
import java.util.Objects;
/**
 * Encapsulates the variables used in producing probabilities from a model and
 * facilitates passing these variables to the eval method.
 */
public class EvalParameters {

  /**
   * Mapping between outcomes and parameter values for each context.
   * The integer representation of the context can be found using <code>pmap</code>.
   */
  private Context[] params;

  /** The number of outcomes being predicted. */
  private final int numOutcomes;

  /**
   * The maximum number of features fired in an event. Usually referred to as C.
   * This is used to normalize the number of features which occur in an event.
   * NOTE(review): never assigned within this class, so it always holds 0.0
   * here -- confirm whether it is still needed.
   */
  private double correctionConstant;

  public EvalParameters(Context[] params, int numOutcomes) {
    this.params = params;
    this.numOutcomes = numOutcomes;
  }

  public Context[] getParams() {
    return params;
  }

  public int getNumOutcomes() {
    return numOutcomes;
  }

  @Override
  public int hashCode() {
    return Objects.hash(Arrays.hashCode(params), numOutcomes, correctionConstant);
  }

  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof EvalParameters)) {
      return false;
    }
    EvalParameters that = (EvalParameters) obj;
    return Arrays.equals(params, that.params)
        && numOutcomes == that.numOutcomes
        && correctionConstant == that.correctionConstant;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/Event.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import java.util.Objects;
/**
 * The context of a decision point during training. This includes
 * contextual predicates and an outcome.
 */
public class Event {
  private final String outcome;
  private final String[] context;
  private final float[] values;

  public Event(String outcome, String[] context) {
    this(outcome, context, null);
  }

  /**
   * Creates an event; outcome and context must be non-null, values may be
   * null when the contexts carry no real-valued weights.
   */
  public Event(String outcome, String[] context, float[] values) {
    this.outcome = Objects.requireNonNull(outcome, "outcome must not be null");
    this.context = Objects.requireNonNull(context, "context must not be null");
    this.values = values;
  }

  public String getOutcome() {
    return outcome;
  }

  public String[] getContext() {
    return context;
  }

  public float[] getValues() {
    return values;
  }

  /** Renders "outcome [ctx1[=val1] ctx2[=val2] ...]". */
  public String toString() {
    StringBuilder sb = new StringBuilder(outcome).append(" [");
    for (int i = 0; i < context.length; i++) {
      if (i > 0) {
        sb.append(" ");
      }
      sb.append(context[i]);
      if (values != null) {
        sb.append("=").append(values[i]);
      }
    }
    return sb.append("]").toString();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/FileEventStream.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.StringTokenizer;
import opennlp.tools.util.ObjectStream;
/**
 * Class for using a file of events as an event stream. The format of the file is one event per line
 * with each line consisting of outcome followed by contexts (space delimited).
 */
public class FileEventStream implements ObjectStream<Event> {

  protected final BufferedReader reader;

  /**
   * Creates a new file event stream from the specified file name.
   *
   * @param fileName the name of the file containing the events.
   * @param encoding the character encoding of the file; when {@code null} the
   *     platform default charset is used. NOTE(review): this differs from the
   *     {@link #FileEventStream(File)} constructor, which always reads UTF-8
   *     -- confirm the inconsistency is intended.
   * @throws IOException When the specified file can not be read.
   */
  public FileEventStream(String fileName, String encoding) throws IOException {
    this(encoding == null ?
        new FileReader(fileName) : new InputStreamReader(new FileInputStream(fileName), encoding));
  }

  public FileEventStream(String fileName) throws IOException {
    this(fileName, null);
  }

  public FileEventStream(Reader reader) throws IOException {
    this.reader = new BufferedReader(reader);
  }

  /**
   * Creates a new file event stream from the specified file, decoded as UTF-8.
   *
   * @param file the file containing the events.
   * @throws IOException When the specified file can not be read.
   */
  public FileEventStream(File file) throws IOException {
    reader = new BufferedReader(new InputStreamReader(new FileInputStream(file), "UTF8"));
  }

  /**
   * Reads the next event, or returns {@code null} at end of stream.
   * Each line holds the outcome followed by whitespace-delimited contexts.
   */
  @Override
  public Event read() throws IOException {
    String line = reader.readLine();
    if (line == null) {
      return null;
    }
    StringTokenizer st = new StringTokenizer(line);
    String outcome = st.nextToken();
    int count = st.countTokens();
    String[] context = new String[count];
    for (int ci = 0; ci < count; ci++) {
      context[ci] = st.nextToken();
    }
    return new Event(outcome, context);
  }

  public void close() throws IOException {
    reader.close();
  }

  /**
   * Generates a string representing the specified event.
   *
   * @param event The event for which a string representation is needed.
   * @return A string representing the specified event, terminated by the
   *     platform line separator.
   */
  public static String toLine(Event event) {
    StringBuilder sb = new StringBuilder();
    sb.append(event.getOutcome());
    for (String c : event.getContext()) {
      sb.append(" ").append(c);
    }
    // System.lineSeparator() returns the same value as the old
    // System.getProperty("line.separator") lookup, without the property read.
    sb.append(System.lineSeparator());
    return sb.toString();
  }

  @Override
  public void reset() throws IOException, UnsupportedOperationException {
    throw new UnsupportedOperationException();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/GenericModelReader.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import java.io.File;
import java.io.IOException;
import opennlp.tools.ml.maxent.io.GISModelReader;
import opennlp.tools.ml.maxent.io.QNModelReader;
import opennlp.tools.ml.naivebayes.NaiveBayesModelReader;
import opennlp.tools.ml.perceptron.PerceptronModelReader;
public class GenericModelReader extends AbstractModelReader {

  private AbstractModelReader delegateModelReader;

  public GenericModelReader(File f) throws IOException {
    super(f);
  }

  public GenericModelReader(DataReader dataReader) {
    super(dataReader);
  }

  /**
   * Reads the leading model-type marker from the data and installs the
   * delegate reader that understands that format.
   *
   * @throws IOException if the marker names an unknown model format or the
   *     data can not be read.
   */
  public void checkModelType() throws IOException {
    String modelType = readUTF();
    if (modelType.equals("Perceptron")) {
      delegateModelReader = new PerceptronModelReader(this.dataReader);
    } else if (modelType.equals("GIS")) {
      delegateModelReader = new GISModelReader(this.dataReader);
    } else if (modelType.equals("QN")) {
      delegateModelReader = new QNModelReader(this.dataReader);
    } else if (modelType.equals("NaiveBayes")) {
      delegateModelReader = new NaiveBayesModelReader(this.dataReader);
    } else {
      throw new IOException("Unknown model format: " + modelType);
    }
  }

  /**
   * Builds the model by delegating to the reader selected by
   * {@link #checkModelType()}.
   *
   * @return the constructed model.
   * @throws IOException if the model data can not be read.
   */
  public AbstractModel constructModel() throws IOException {
    // NOTE(review): assumes checkModelType() has already run; otherwise the
    // delegate is null — confirm the calling contract in AbstractModelReader.
    return delegateModelReader.constructModel();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/GenericModelWriter.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.zip.GZIPOutputStream;
import ai.idylnlp.opennlp.custom.EncryptedDataOutputStream;
import opennlp.tools.ml.maxent.io.BinaryGISModelWriter;
import opennlp.tools.ml.maxent.io.BinaryQNModelWriter;
import opennlp.tools.ml.model.AbstractModel.ModelType;
import opennlp.tools.ml.naivebayes.BinaryNaiveBayesModelWriter;
import opennlp.tools.ml.perceptron.BinaryPerceptronModelWriter;
public class GenericModelWriter extends AbstractModelWriter {

  private AbstractModelWriter delegateWriter;

  /**
   * Creates a writer that persists the given model to the specified file,
   * transparently gzip-compressing the output when the file name ends in ".gz".
   *
   * @param model the model to persist.
   * @param file the destination file.
   * @throws IOException if the file can not be opened for writing.
   */
  public GenericModelWriter(AbstractModel model, File file) throws IOException {
    String filename = file.getName();
    OutputStream os;
    // handle the zipped/not zipped distinction
    if (filename.endsWith(".gz")) {
      os = new GZIPOutputStream(new FileOutputStream(file));
    } else {
      os = new FileOutputStream(file);
    }
    init(model, new EncryptedDataOutputStream(os));
  }

  /**
   * Creates a writer that persists the given model to an already-open stream.
   *
   * @param model the model to persist.
   * @param dos the destination stream.
   */
  public GenericModelWriter(AbstractModel model, EncryptedDataOutputStream dos) {
    init(model, dos);
  }

  /**
   * Selects the binary writer matching the model's type.
   *
   * @throws IllegalArgumentException if the model type has no binary writer.
   *     Previously an unsupported type silently left the delegate null,
   *     deferring the failure to a NullPointerException in persist()/close().
   */
  private void init(AbstractModel model, EncryptedDataOutputStream dos) {
    ModelType modelType = model.getModelType();
    if (modelType == ModelType.Perceptron) {
      delegateWriter = new BinaryPerceptronModelWriter(model, dos);
    } else if (modelType == ModelType.Maxent) {
      delegateWriter = new BinaryGISModelWriter(model, dos);
    } else if (modelType == ModelType.MaxentQn) {
      delegateWriter = new BinaryQNModelWriter(model, dos);
    } else if (modelType == ModelType.NaiveBayes) {
      delegateWriter = new BinaryNaiveBayesModelWriter(model, dos);
    } else {
      throw new IllegalArgumentException("Unsupported model type: " + modelType);
    }
  }

  @Override
  public void close() throws IOException {
    delegateWriter.close();
  }

  @Override
  public void persist() throws IOException {
    delegateWriter.persist();
  }

  @Override
  public void writeDouble(double d) throws IOException {
    delegateWriter.writeDouble(d);
  }

  @Override
  public void writeInt(int i) throws IOException {
    delegateWriter.writeInt(i);
  }

  @Override
  public void writeUTF(String s) throws IOException {
    delegateWriter.writeUTF(s);
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/HashSumEventStream.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import java.io.IOException;
import java.math.BigInteger;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import opennlp.tools.util.AbstractObjectStream;
import opennlp.tools.util.ObjectStream;
public class HashSumEventStream extends AbstractObjectStream<Event> {

  // Accumulates an MD5 digest over the string form of every event read.
  private final MessageDigest digest;

  /**
   * Wraps an event stream and incrementally hashes every event read from it.
   *
   * @param eventStream the stream to wrap.
   */
  public HashSumEventStream(ObjectStream<Event> eventStream) {
    super(eventStream);
    try {
      digest = MessageDigest.getInstance("MD5");
    } catch (NoSuchAlgorithmException e) {
      // Every Java platform is required to support MD5, so this cannot happen.
      throw new IllegalStateException(e);
    }
  }

  @Override
  public Event read() throws IOException {
    Event event = super.read();
    if (event != null) {
      digest.update(event.toString().getBytes(StandardCharsets.UTF_8));
    }
    return event;
  }

  /**
   * Calculates the hash sum of the stream. The method must be
   * called after the stream is completely consumed.
   *
   * <p>Note: {@link MessageDigest#digest()} resets the digest, so this method
   * yields a meaningful value only once per consumed stream.
   *
   * @return the hash sum
   * @throws IllegalStateException if the stream is not consumed completely,
   *     completely means that hasNext() returns false
   */
  public BigInteger calculateHashSum() {
    return new BigInteger(1, digest.digest());
  }

  /** No-op; retained for backward compatibility with earlier callers. */
  public void remove() {
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/MaxentModel.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
/**
* Interface for maximum entropy models.
**/
public interface MaxentModel {
  /**
   * Evaluates a context.
   *
   * @param context A list of String names of the contextual predicates
   *                which are to be evaluated together.
   * @return an array of the probabilities for each of the different
   *         outcomes, all of which sum to 1.
   *
   **/
  double[] eval(String[] context);
  /**
   * Evaluates a context, reusing a caller-supplied output array.
   *
   * @param context A list of String names of the contextual predicates
   *                which are to be evaluated together.
   * @param probs An array which is populated with the probabilities for each of the different
   *              outcomes, all of which sum to 1.
   * @return an array of the probabilities for each of the different outcomes, all of which sum to 1.
   **/
  double[] eval(String[] context, double[] probs);
  /**
   * Evaluates a context with the specified real-valued weights for each predicate.
   *
   * @param context A list of String names of the contextual predicates
   *                which are to be evaluated together.
   * @param values The values associated with each context.
   * @return an array of the probabilities for each of the different outcomes, all of which sum to 1.
   */
  double[] eval(String[] context, float[] values);
  /**
   * Simple function to return the outcome associated with the index
   * containing the highest probability in the double[].
   *
   * @param outcomes A <code>double[]</code> as returned by the
   *                 <code>eval(String[] context)</code> method.
   * @return the String name of the best outcome
   **/
  String getBestOutcome(double[] outcomes);
  /**
   * Return a string matching all the outcome names with all the
   * probabilities produced by the <code>eval(String[] context)</code> method.
   *
   * @param outcomes A <code>double[]</code> as returned by the
   *                 <code>eval(String[] context)</code> method.
   * @return String containing outcome names paired with the normalized
   *         probability (contained in the <code>double[] ocs</code>)
   *         for each one.
   **/
  // TODO: This should be removed, can't be used anyway without format spec
  String getAllOutcomes(double[] outcomes);
  /**
   * Gets the String name of the outcome associated with the index i.
   *
   * @param i the index for which the name of the associated outcome is desired.
   * @return the String name of the outcome
   **/
  String getOutcome(int i);
  /**
   * Gets the index associated with the String name of the given outcome.
   *
   * @param outcome the String name of the outcome for which the
   *                index is desired
   * @return the index if the given outcome label exists for this
   *         model, -1 if it does not.
   **/
  int getIndex(String outcome);
  /*
   * Returns the data structures relevant to storing the model.
   **/
  // public Object[] getDataStructures();
  /**
   * Returns the number of outcomes for this model.
   *
   * @return The number of outcomes.
   **/
  int getNumOutcomes();
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/MutableContext.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import java.util.Arrays;
/**
* Class used to store parameters or expected values associated with this context which
* can be updated or assigned.
*/
public class MutableContext extends Context {
  /**
   * Creates a new parameters object with the specified parameters associated with the specified
   * outcome pattern.
   *
   * @param outcomePattern Array of outcomes for which parameters exist for this context.
   * @param parameters Parameters for the outcomes specified.
   */
  public MutableContext(int[] outcomePattern, double[] parameters) {
    super(outcomePattern, parameters);
  }
  /**
   * Assigns the parameter or expected value at the specified outcomeIndex the specified value.
   *
   * @param outcomeIndex The index of the parameter or expected value to be updated.
   * @param value The value to be assigned.
   */
  public void setParameter(int outcomeIndex, double value) {
    parameters[outcomeIndex] = value;
  }
  /**
   * Updates the parameter or expected value at the specified outcomeIndex by
   * adding the specified value to its current value.
   *
   * @param outcomeIndex The index of the parameter or expected value to be updated.
   * @param value The value to be added.
   */
  public void updateParameter(int outcomeIndex, double value) {
    parameters[outcomeIndex] += value;
  }
  /**
   * Checks whether this context carries a parameter for the given outcome.
   *
   * @param outcome the outcome index to look for.
   * @return true if the outcome is part of this context's outcome pattern.
   */
  public boolean contains(int outcome) {
    // NOTE(review): binarySearch requires the inherited outcomes array to be
    // sorted ascending — presumably guaranteed by Context's construction; confirm.
    return Arrays.binarySearch(outcomes,outcome) >= 0;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/ObjectDataReader.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import java.io.IOException;
import java.io.ObjectInputStream;
public class ObjectDataReader implements DataReader {

  /** Underlying object stream supplying the primitive model values. */
  protected ObjectInputStream ois;

  /**
   * Creates a data reader that delegates all reads to the given object stream.
   *
   * @param inputStream the stream to read model data from.
   */
  public ObjectDataReader(ObjectInputStream inputStream) {
    this.ois = inputStream;
  }

  /**
   * Reads the next double from the stream.
   *
   * @return the double value read.
   * @throws IOException if the underlying stream fails.
   */
  public double readDouble() throws IOException {
    return ois.readDouble();
  }

  /**
   * Reads the next int from the stream.
   *
   * @return the int value read.
   * @throws IOException if the underlying stream fails.
   */
  public int readInt() throws IOException {
    return ois.readInt();
  }

  /**
   * Reads the next modified-UTF-8 string from the stream.
   *
   * @return the string read.
   * @throws IOException if the underlying stream fails.
   */
  public String readUTF() throws IOException {
    return ois.readUTF();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/OnePassDataIndexer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import java.io.IOException;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.ObjectStreamUtils;
/**
* An indexer for maxent model data which handles cutoffs for uncommon
* contextual predicates and provides a unique integer index for each of the
* predicates.
*/
public class OnePassDataIndexer extends AbstractDataIndexer {

  public OnePassDataIndexer(){}

  /**
   * Indexes the events of the given stream in a single pass: counts predicate
   * occurrences, drops predicates below the cutoff, then indexes, sorts and
   * merges the events.
   *
   * @param eventStream the events to index.
   * @throws IOException if the event stream can not be read.
   */
  @Override
  public void index(ObjectStream<Event> eventStream) throws IOException {
    int cutoff = trainingParameters.getIntParameter(CUTOFF_PARAM, CUTOFF_DEFAULT);
    boolean sort = trainingParameters.getBooleanParameter(SORT_PARAM, SORT_DEFAULT);
    long start = System.currentTimeMillis();

    display("Indexing events with OnePass using cutoff of " + cutoff + "\n\n");

    display("\tComputing event counts... ");
    Map<String, Integer> predicateIndex = new HashMap<>();
    List<Event> events = computeEventCounts(eventStream, predicateIndex, cutoff);
    display("done. " + events.size() + " events\n");

    display("\tIndexing... ");
    List<ComparableEvent> eventsToCompare =
        index(ObjectStreamUtils.createObjectStream(events), predicateIndex);
    display("done.\n");

    display("Sorting and merging events... ");
    sortAndMerge(eventsToCompare, sort);
    display(String.format("Done indexing in %.2f s.\n", (System.currentTimeMillis() - start) / 1000d));
  }

  /**
   * Reads all events from <tt>eventStream</tt> into a list while counting how
   * often each contextual predicate occurs. Predicates seen at least
   * <tt>cutoff</tt> times are assigned a unique integer index (in natural
   * string order) in <tt>predicatesInOut</tt>, and their counts are recorded
   * in {@code predCounts}.
   *
   * @param eventStream the stream of events to read.
   * @param predicatesInOut receives the predicate-to-index mapping.
   * @param cutoff minimum number of occurrences a predicate needs to be kept.
   * @return the list of all events read from the stream.
   */
  private List<Event> computeEventCounts(ObjectStream<Event> eventStream,
      Map<String, Integer> predicatesInOut, int cutoff) throws IOException {
    Map<String, Integer> counter = new HashMap<>();
    List<Event> events = new LinkedList<>();

    Event event;
    while ((event = eventStream.read()) != null) {
      events.add(event);
      update(event.getContext(), counter);
    }

    // Drop all predicates below the cutoff, then index the survivors in
    // natural (sorted) string order.
    counter.values().removeIf(count -> count < cutoff);
    String[] predicateSet = counter.keySet().stream().sorted().toArray(String[]::new);

    predCounts = new int[predicateSet.length];
    for (int index = 0; index < predicateSet.length; index++) {
      predCounts[index] = counter.get(predicateSet[index]);
      predicatesInOut.put(predicateSet[index], index);
    }

    return events;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/OnePassRealValueDataIndexer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import java.util.List;
import opennlp.tools.util.InsufficientTrainingDataException;
/**
* An indexer for maxent model data which handles cutoffs for uncommon
* contextual predicates and provides a unique integer index for each of the
* predicates and maintains event values.
*/
public class OnePassRealValueDataIndexer extends OnePassDataIndexer {

  // Per-unique-event real values, parallel to the merged event arrays.
  float[][] values;

  public OnePassRealValueDataIndexer() {
  }

  /**
   * Returns the real values associated with each unique event.
   *
   * @return the values array, populated by {@code sortAndMerge}.
   */
  public float[][] getValues() {
    return values;
  }

  /**
   * Sorts and merges the events via the superclass, then collects the real
   * values of each surviving (non-duplicate) event into {@code values}.
   *
   * @param eventsToCompare the events to sort and merge; duplicate slots are null.
   * @param sort whether the events should be sorted.
   * @return the number of unique events.
   * @throws InsufficientTrainingDataException if there is not enough data to train.
   */
  protected int sortAndMerge(List<ComparableEvent> eventsToCompare, boolean sort)
      throws InsufficientTrainingDataException {
    int numUniqueEvents = super.sortAndMerge(eventsToCompare, sort);
    values = new float[numUniqueEvents][];
    int next = 0;
    for (ComparableEvent candidate : eventsToCompare) {
      // Null entries mark duplicates removed by the merge; skip them.
      if (candidate != null) {
        values[next++] = candidate.values;
      }
    }
    return numUniqueEvents;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/PlainTextFileDataReader.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.zip.GZIPInputStream;
public class PlainTextFileDataReader implements DataReader {

  // Source of model values, one value per line.
  private BufferedReader input;

  /**
   * Creates a reader for the given file, transparently decompressing it when
   * the file name ends in ".gz".
   *
   * @param f the file containing the model data.
   * @throws IOException if the file can not be opened.
   */
  public PlainTextFileDataReader(File f) throws IOException {
    InputStream fileStream = new BufferedInputStream(new FileInputStream(f));
    if (f.getName().endsWith(".gz")) {
      fileStream = new BufferedInputStream(new GZIPInputStream(fileStream));
    }
    // NOTE(review): decodes with the platform default charset, matching the
    // original behavior — confirm model files are ASCII-compatible before
    // switching to an explicit charset.
    input = new BufferedReader(new InputStreamReader(fileStream));
  }

  /**
   * Creates a reader over an arbitrary input stream (platform default charset).
   *
   * @param in the stream containing the model data.
   */
  public PlainTextFileDataReader(InputStream in) {
    input = new BufferedReader(new InputStreamReader(in));
  }

  /**
   * Creates a reader over an already-buffered reader.
   *
   * @param in the reader containing the model data.
   */
  public PlainTextFileDataReader(BufferedReader in) {
    input = in;
  }

  /** Reads the next line and parses it as a double. */
  public double readDouble() throws IOException {
    return Double.parseDouble(input.readLine());
  }

  /** Reads the next line and parses it as an int. */
  public int readInt() throws IOException {
    return Integer.parseInt(input.readLine());
  }

  /** Reads the next line as a string. */
  public String readUTF() throws IOException {
    return input.readLine();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/Prior.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
/**
* This interface allows one to implement a prior distribution for use in
* maximum entropy model training.
*/
public interface Prior {
  /**
   * Populates the specified array with the log of the distribution for the specified context.
   * The returned array will be overwritten and needs to be re-initialized with every call to this method.
   * @param dist An array to be populated with the log of the prior distribution.
   * @param context The indices of the contextual predicates for an event.
   */
  void logPrior(double[] dist, int[] context);
  /**
   * Populates the specified array with the log of the distribution for the specified context.
   * The returned array will be overwritten and needs to be re-initialized with every call to this method.
   * @param dist An array to be populated with the log of the prior distribution.
   * @param context The indices of the contextual predicates for an event.
   * @param values The values associated with the context.
   */
  void logPrior(double[] dist, int[] context, float[] values);
  /**
   * Populates the specified array with the log of the distribution for the specified context.
   * The returned array will be overwritten and needs to be re-initialized with every call to this method.
   * @param dist An array to be populated with the log of the prior distribution.
   * @param context The contextual predicates for an event.
   * @param values The values associated with the context.
   */
  void logPrior(double[] dist, Context[] context, float[] values);
  /**
   * Method to specify the labels for the outcomes and contexts. This is used to map
   * integer outcomes and contexts to their string values. This method is called prior
   * to any call to #logPrior.
   * @param outcomeLabels An array of each outcome label.
   * @param contextLabels An array of each context label.
   */
  void setLabels(String[] outcomeLabels, String[] contextLabels);
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/RealValueFileEventStream.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import java.io.File;
import java.io.IOException;
public class RealValueFileEventStream extends FileEventStream {

  public RealValueFileEventStream(String fileName) throws IOException {
    super(fileName);
  }

  public RealValueFileEventStream(String fileName, String encoding) throws IOException {
    super(fileName, encoding);
  }

  public RealValueFileEventStream(File file) throws IOException {
    super(file);
  }

  /**
   * Parses the specified contexts and re-populates context array with features
   * and returns the values for these features. If all values are unspecified,
   * then null is returned.
   *
   * @param contexts The contexts with real values specified; entries with a
   *                 valid "feature=value" suffix are replaced in place by the
   *                 bare feature name.
   * @return The value for each context or null if all values are unspecified.
   */
  public static float[] parseContexts(String[] contexts) {
    float[] values = new float[contexts.length];
    boolean anyRealValue = false;
    for (int i = 0; i < contexts.length; i++) {
      String context = contexts[i];
      int eq = context.lastIndexOf("=");
      // A value is only present when "=" is neither the first nor the last character.
      if (eq <= 0 || eq + 1 >= context.length()) {
        values[i] = 1;
        continue;
      }
      float parsed;
      try {
        parsed = Float.parseFloat(context.substring(eq + 1));
      } catch (NumberFormatException e) {
        System.err.println("Unable to determine value in context:" + context);
        values[i] = 1;
        continue;
      }
      if (parsed < 0) {
        throw new RuntimeException("Negative values are not allowed: " + context);
      }
      values[i] = parsed;
      contexts[i] = context.substring(0, eq);
      anyRealValue = true;
    }
    return anyRealValue ? values : null;
  }

  /**
   * Reads the next event line: the outcome, then space-separated
   * "feature=value" context entries.
   *
   * @return the next event, or {@code null} when the stream is exhausted.
   */
  @Override
  public Event read() throws IOException {
    String line = reader.readLine();
    if (line == null) {
      return null;
    }
    int separator = line.indexOf(' ');
    String outcome = line.substring(0, separator);
    String[] contexts = line.substring(separator + 1).split(" ");
    float[] values = parseContexts(contexts);
    return new Event(outcome, contexts, values);
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/Sequence.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
/**
* Class which models a sequence.
* @param <T> The type of the object which is the source of this sequence.
*/
public class Sequence<T> {

  private final Event[] events;
  private final T source;

  /**
   * Creates a new sequence made up of the specified events and derived from the
   * specified source.
   *
   * @param events the events of the sequence.
   * @param source the source object for this sequence.
   */
  public Sequence(Event[] events, T source) {
    this.events = events;
    this.source = source;
  }

  /**
   * Returns the events which make up this sequence.
   *
   * @return the events which make up this sequence.
   */
  public Event[] getEvents() {
    return events;
  }

  /**
   * Returns an object from which this sequence can be derived. This object is
   * used when the events for this sequence need to be re-derived such as in a
   * call to SequenceStream.updateContext.
   *
   * @return an object from which this sequence can be derived.
   */
  public T getSource() {
    return source;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/SequenceClassificationModel.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import opennlp.tools.util.BeamSearchContextGenerator;
import opennlp.tools.util.Sequence;
import opennlp.tools.util.SequenceValidator;
/**
* A classification model that can label an input sequence.
*
* @param <T>
*/
public interface SequenceClassificationModel<T> {
  /**
   * Finds the sequence with the highest probability.
   *
   * @param sequence the input tokens to label.
   * @param additionalContext extra, caller-supplied context passed through to
   *     the context generator.
   * @param cg generates the contextual predicates for each position.
   * @param validator rejects invalid outcome sequences during the search.
   *
   * @return the best scoring outcome sequence.
   */
  Sequence bestSequence(T[] sequence, Object[] additionalContext,
      BeamSearchContextGenerator<T> cg, SequenceValidator<T> validator);
  /**
   * Finds the n most probable sequences.
   *
   * @param numSequences the maximum number of sequences to return.
   * @param sequence the input tokens to label.
   * @param additionalContext extra, caller-supplied context passed through to
   *     the context generator.
   * @param minSequenceScore the minimum score a sequence must have to be returned.
   * @param cg generates the contextual predicates for each position.
   * @param validator rejects invalid outcome sequences during the search.
   *
   * @return the best scoring outcome sequences.
   */
  Sequence[] bestSequences(int numSequences, T[] sequence, Object[] additionalContext,
      double minSequenceScore, BeamSearchContextGenerator<T> cg, SequenceValidator<T> validator);
  /**
   * Finds the n most probable sequences.
   *
   * @param numSequences the maximum number of sequences to return.
   * @param sequence the input tokens to label.
   * @param additionalContext extra, caller-supplied context passed through to
   *     the context generator.
   * @param cg generates the contextual predicates for each position.
   * @param validator rejects invalid outcome sequences during the search.
   *
   * @return the best scoring outcome sequences.
   */
  Sequence[] bestSequences(int numSequences, T[] sequence,
      Object[] additionalContext, BeamSearchContextGenerator<T> cg, SequenceValidator<T> validator);
  /**
   * Returns all possible outcomes.
   *
   * @return the array of all outcome labels this model can assign.
   */
  String[] getOutcomes();
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/SequenceStream.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import opennlp.tools.util.ObjectStream;
/**
 * Interface for streams of sequences used to train sequence models.
 */
public interface SequenceStream extends ObjectStream<Sequence> {
  /**
   * Creates a new event array based on the outcomes predicted by the specified parameters
   * for the specified sequence.
   *
   * @param sequence The sequence to be evaluated.
   * @param model the model whose parameters are used to re-derive the events
   * @return event array
   */
  Event[] updateContext(Sequence sequence, AbstractModel model);
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/SequenceStreamEventStream.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import opennlp.tools.util.ObjectStream;
/**
 * Class which turns a sequence stream into an event stream.
 *
 * Events are served from the current sequence's event array; when that is
 * exhausted the next sequence is fetched from the underlying stream.
 */
public class SequenceStreamEventStream implements ObjectStream<Event> {

  private final SequenceStream sequenceStream;

  /** Iterator over the events of the sequence currently being drained. */
  private Iterator<Event> pendingEvents = Collections.emptyListIterator();

  public SequenceStreamEventStream(SequenceStream sequenceStream) {
    this.sequenceStream = sequenceStream;
  }

  @Override
  public Event read() throws IOException {
    // Loop (rather than a single check) so that sequences with an empty
    // event array are skipped transparently.
    for (;;) {
      if (pendingEvents.hasNext()) {
        return pendingEvents.next();
      }
      Sequence<?> nextSequence = sequenceStream.read();
      if (nextSequence == null) {
        // Underlying stream is exhausted.
        return null;
      }
      pendingEvents = Arrays.asList(nextSequence.getEvents()).iterator();
    }
  }

  @Override
  public void reset() throws IOException, UnsupportedOperationException {
    // Drop any buffered events before rewinding the underlying stream.
    pendingEvents = Collections.emptyListIterator();
    sequenceStream.reset();
  }

  @Override
  public void close() throws IOException {
    pendingEvents = Collections.emptyListIterator();
    sequenceStream.close();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/TwoPassDataIndexer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import opennlp.tools.util.ObjectStream;
/**
 * Collecting event and context counts by making two passes over the events. The
 * first pass determines which contexts will be used by the model, and the
 * second pass creates the events in memory containing only the contexts which
 * will be used. This greatly reduces the amount of memory required for storing
 * the events. During the first pass a temporary event file is created which
 * is read during the second pass.
 */
public class TwoPassDataIndexer extends AbstractDataIndexer {
  public TwoPassDataIndexer() {}
  @Override
  public void index(ObjectStream<Event> eventStream) throws IOException {
    // CUTOFF/SORT parameters and trainingParameters come from AbstractDataIndexer.
    int cutoff = trainingParameters.getIntParameter(CUTOFF_PARAM, CUTOFF_DEFAULT);
    boolean sort = trainingParameters.getBooleanParameter(SORT_PARAM, SORT_DEFAULT);
    long start = System.currentTimeMillis();
    display("Indexing events with TwoPass using cutoff of " + cutoff + "\n\n");
    display("\tComputing event counts... ");
    Map<String,Integer> predicateIndex = new HashMap<>();
    // First pass: stream every event to a temp file while counting predicate
    // frequencies; only predicates meeting the cutoff receive an index.
    File tmp = File.createTempFile("events", null);
    // Fallback cleanup in case the explicit delete() below is skipped or fails.
    tmp.deleteOnExit();
    int numEvents;
    try (Writer osw = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(tmp),
        StandardCharsets.UTF_8))) {
      numEvents = computeEventCounts(eventStream, osw, predicateIndex, cutoff);
    }
    display("done. " + numEvents + " events\n");
    display("\tIndexing... ");
    List<ComparableEvent> eventsToCompare;
    // Second pass: re-read the persisted events, keeping only the predicates
    // that survived the cutoff (those present in predicateIndex).
    try (FileEventStream fes = new FileEventStream(tmp)) {
      eventsToCompare = index(fes, predicateIndex);
    }
    tmp.delete();
    display("done.\n");
    if (sort) {
      display("Sorting and merging events... ");
    }
    else {
      display("Collecting events... ");
    }
    sortAndMerge(eventsToCompare,sort);
    display(String.format("Done indexing in %.2f s.\n", (System.currentTimeMillis() - start) / 1000d));
  }
  /**
   * Reads events from <tt>eventStream</tt>, writing each one to
   * <tt>eventStore</tt> for the second pass. The predicates associated with
   * each event are counted and any which occur at least <tt>cutoff</tt> times
   * are added to the <tt>predicatesInOut</tt> map along with a unique integer
   * index. Also populates the inherited <tt>predCounts</tt> array with the
   * count of each surviving predicate.
   *
   * @param eventStream an <code>EventStream</code> value
   * @param eventStore a writer to which the events are written to for later processing.
   * @param predicatesInOut an out-parameter map from predicate name to its assigned index
   * @param cutoff the minimum number of occurrences a predicate must have to be kept
   * @return the total number of events read from the stream
   */
  private int computeEventCounts(ObjectStream<Event> eventStream, Writer eventStore,
      Map<String,Integer> predicatesInOut, int cutoff) throws IOException {
    Map<String,Integer> counter = new HashMap<>();
    int eventCount = 0;
    Event ev;
    while ((ev = eventStream.read()) != null) {
      eventCount++;
      // Persist the event so the second pass can re-read it.
      eventStore.write(FileEventStream.toLine(ev));
      String[] ec = ev.getContext();
      update(ec, counter);
    }
    // Keep only predicates at/above the cutoff; sort so index assignment is
    // deterministic across runs.
    String[] predicateSet = counter.entrySet().stream()
        .filter(entry -> entry.getValue() >= cutoff)
        .map(Map.Entry::getKey).sorted()
        .toArray(String[]::new);
    predCounts = new int[predicateSet.length];
    for (int i = 0; i < predicateSet.length; i++) {
      predCounts[i] = counter.get(predicateSet[i]);
      predicatesInOut.put(predicateSet[i], i);
    }
    return eventCount;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/model/UniformPrior.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.model;
import java.util.Objects;
/**
 * Provide a maximum entropy model with a uniform prior.
 */
public class UniformPrior implements Prior {

  /** Number of outcomes the prior is spread over. */
  private int numOutcomes;

  /** Cached log(1 / numOutcomes): the uniform log-prior for every outcome. */
  private double r;

  /**
   * Fills {@code dist} with the uniform log prior. The context and values
   * arguments are ignored because a uniform prior does not depend on them.
   *
   * @param dist the destination array; the first numOutcomes entries are overwritten
   * @param context ignored
   * @param values ignored
   */
  public void logPrior(double[] dist, int[] context, float[] values) {
    for (int oi = 0; oi < numOutcomes; oi++) {
      dist[oi] = r;
    }
  }

  @Override
  public void logPrior(double[] dist, Context[] context, float[] values) {
    logPrior(dist, (int[]) null, values);
  }

  public void logPrior(double[] dist, int[] context) {
    logPrior(dist,context,null);
  }

  /**
   * Initializes the prior from the outcome labels; {@code contextLabels}
   * is unused since the prior is uniform over outcomes.
   *
   * @param outcomeLabels the outcome labels; determines numOutcomes
   * @param contextLabels ignored
   */
  public void setLabels(String[] outcomeLabels, String[] contextLabels) {
    this.numOutcomes = outcomeLabels.length;
    r = Math.log(1.0 / numOutcomes);
  }

  @Override
  public int hashCode() {
    return Objects.hash(numOutcomes, r);
  }

  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    if (obj instanceof UniformPrior) {
      UniformPrior prior = (UniformPrior) obj;
      // Compare doubles with Double.compare so equals() stays consistent with
      // hashCode(): Objects.hash boxes r via Double, which distinguishes
      // 0.0 from -0.0 and treats NaN as equal to itself, while == does not.
      return numOutcomes == prior.numOutcomes && Double.compare(r, prior.r) == 0;
    }
    return false;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/naivebayes/BinaryNaiveBayesModelReader.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.naivebayes;
import java.io.DataInputStream;
import java.io.File;
import java.io.IOException;
import opennlp.tools.ml.model.BinaryFileDataReader;
/**
 * Model reader that loads NaiveBayes models from a binary format.
 */
public class BinaryNaiveBayesModelReader extends NaiveBayesModelReader {
  /**
   * Constructor which directly instantiates the DataInputStream containing
   * the model contents.
   *
   * @param dis The DataInputStream containing the model information.
   */
  public BinaryNaiveBayesModelReader(DataInputStream dis) {
    super(new BinaryFileDataReader(dis));
  }
  /**
   * Constructor which takes a File and creates a reader for it. Detects
   * whether the file is gzipped or not based on whether the suffix contains
   * ".gz"
   *
   * @param f The File in which the model is stored.
   */
  public BinaryNaiveBayesModelReader(File f) throws IOException {
    super(f);
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/naivebayes/BinaryNaiveBayesModelWriter.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.naivebayes;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.zip.GZIPOutputStream;
import ai.idylnlp.opennlp.custom.EncryptedDataOutputStream;
import opennlp.tools.ml.model.AbstractModel;
/**
 * Model writer that saves models in binary format.
 */
public class BinaryNaiveBayesModelWriter extends NaiveBayesModelWriter {

  /** Destination stream for the persisted model; assigned exactly once. */
  private final EncryptedDataOutputStream output;

  /**
   * Constructor which takes a NaiveBayesModel and a File and prepares itself to
   * write the model to that file. Detects whether the file is gzipped or not
   * based on whether the suffix contains ".gz".
   *
   * @param model The NaiveBayesModel which is to be persisted.
   * @param f The File in which the model is to be persisted.
   */
  public BinaryNaiveBayesModelWriter(AbstractModel model, File f) throws IOException {
    super(model);
    FileOutputStream fileOut = new FileOutputStream(f);
    try {
      if (f.getName().endsWith(".gz")) {
        output = new EncryptedDataOutputStream(new GZIPOutputStream(fileOut));
      } else {
        output = new EncryptedDataOutputStream(fileOut);
      }
    } catch (IOException e) {
      // The GZIPOutputStream constructor writes the gzip header and can throw;
      // close the underlying stream so the file handle is not leaked.
      fileOut.close();
      throw e;
    }
  }

  /**
   * Constructor which takes a NaiveBayesModel and a DataOutputStream and prepares
   * itself to write the model to that stream.
   *
   * @param model The NaiveBayesModel which is to be persisted.
   * @param dos The stream which will be used to persist the model.
   */
  public BinaryNaiveBayesModelWriter(AbstractModel model, EncryptedDataOutputStream dos) {
    super(model);
    output = dos;
  }

  public void writeUTF(String s) throws java.io.IOException {
    output.writeEncryptedUTF(s);
  }

  public void writeInt(int i) throws java.io.IOException {
    output.writeInt(i);
  }

  public void writeDouble(double d) throws java.io.IOException {
    output.writeDouble(d);
  }

  public void close() throws java.io.IOException {
    output.flush();
    output.close();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/naivebayes/LogProbabilities.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.naivebayes;
import java.util.ArrayList;
import java.util.Map;
import java.util.Map.Entry;
/**
 * Class implementing the probability distribution over labels returned by
 * a classifier as a log of probabilities.
 * This is necessary because floating point precision in Java does not allow for high-accuracy
 * representation of very low probabilities such as would occur in a text categorizer.
 *
 * @param <T> the label (category) class
 *
 */
public class LogProbabilities<T> extends Probabilities<T> {
  /**
   * Assigns a probability to a label, discarding any previously assigned probability.
   *
   * @param t the label to which the probability is being assigned
   * @param probability the probability to assign
   */
  public void set(T t, double probability) {
    isNormalised = false;
    map.put(t, log(probability));
  }
  /**
   * Assigns a probability to a label, discarding any previously assigned probability.
   *
   * @param t the label to which the probability is being assigned
   * @param probability the probability to assign
   */
  public void set(T t, Probability<T> probability) {
    isNormalised = false;
    map.put(t, probability.getLog());
  }
  /**
   * Assigns a probability to a label, discarding any previously assigned probability,
   * if the new probability is greater than the old one.
   *
   * @param t the label to which the probability is being assigned
   * @param probability the probability to assign
   */
  public void setIfLarger(T t, double probability) {
    double logProbability = log(probability);
    Double p = map.get(t);
    if (p == null || logProbability > p) {
      isNormalised = false;
      map.put(t, logProbability);
    }
  }
  /**
   * Assigns a log probability to a label, discarding any previously assigned probability.
   *
   * @param t the label to which the log probability is being assigned
   * @param probability the log probability to assign
   */
  public void setLog(T t, double probability) {
    isNormalised = false;
    map.put(t, probability);
  }
  /**
   * Compounds the existing probability mass on the label with the new probability passed in to the method.
   *
   * @param t the label whose probability mass is being updated
   * @param probability the probability weight to add
   * @param count the amplifying factor for the probability compounding
   */
  public void addIn(T t, double probability, int count) {
    isNormalised = false;
    Double p = map.get(t);
    if (p == null)
      p = 0.0;
    // In log space, compounding is addition; count scales the contribution.
    probability = log(probability) * count;
    map.put(t, p + probability);
  }
  /**
   * Converts the stored log probabilities into a normalized (summing to 1)
   * probability map. The result is cached until the map is next mutated.
   * Uses the max-subtraction trick for numerical stability when exponentiating.
   */
  private Map<T, Double> normalize() {
    if (isNormalised)
      return normalised;
    Map<T, Double> temp = createMapDataStructure();
    double highestLogProbability = Double.NEGATIVE_INFINITY;
    for (Entry<T, Double> entry : map.entrySet()) {
      final Double p = entry.getValue();
      if (p != null && p > highestLogProbability) {
        highestLogProbability = p;
      }
    }
    double sum = 0;
    for (Entry<T, Double> entry : map.entrySet()) {
      T t = entry.getKey();
      Double p = entry.getValue();
      if (p != null) {
        // Shift by the maximum before exponentiation to avoid underflow.
        double temp_p = Math.exp(p - highestLogProbability);
        if (!Double.isNaN(temp_p)) {
          sum += temp_p;
          temp.put(t, temp_p);
        }
      }
    }
    for (Entry<T, Double> entry : temp.entrySet()) {
      final T t = entry.getKey();
      final Double p = entry.getValue();
      if (p != null && sum > Double.MIN_VALUE) {
        temp.put(t, p / sum);
      }
    }
    normalised = temp;
    isNormalised = true;
    return temp;
  }
  private double log(double prob) {
    return Math.log(prob);
  }
  /**
   * Returns the probability associated with a label
   *
   * @param t the label whose probability needs to be returned
   * @return the normalized probability associated with the label, or 0.0 if absent
   */
  public Double get(T t) {
    Double d = normalize().get(t);
    if (d == null)
      return 0.0;
    return d;
  }
  /**
   * Returns the log probability associated with a label
   *
   * @param t the label whose log probability needs to be returned
   * @return the log probability associated with the label, or negative infinity if absent
   */
  public Double getLog(T t) {
    Double d = map.get(t);
    if (d == null)
      return Double.NEGATIVE_INFINITY;
    return d;
  }
  /**
   * Removes all labels whose accumulated (log-space) mass is below the given
   * probability threshold.
   *
   * @param i the probability threshold (converted internally to log space)
   */
  public void discardCountsBelow(double i) {
    i = Math.log(i);
    ArrayList<T> labelsToRemove = new ArrayList<>();
    for (Entry<T, Double> entry : map.entrySet()) {
      final T label = entry.getKey();
      Double sum = entry.getValue();
      if (sum == null) sum = Double.NEGATIVE_INFINITY;
      if (sum < i)
        labelsToRemove.add(label);
    }
    // Removal is deferred to avoid ConcurrentModificationException.
    for (T label : labelsToRemove) {
      map.remove(label);
    }
  }
  /**
   * Returns the probabilities associated with all labels
   *
   * @return the HashMap of labels and their normalized probabilities
   */
  public Map<T, Double> getAll() {
    return normalize();
  }
  /**
   * Returns the most likely label
   *
   * @return the label that has the highest associated probability, or
   *     {@code null} if the map is empty or holds only null values
   */
  public T getMax() {
    double max = Double.NEGATIVE_INFINITY;
    T maxT = null;
    for (Entry<T, Double> entry : map.entrySet()) {
      final T t = entry.getKey();
      final Double temp = entry.getValue();
      // Null-check before unboxing: every other accessor in this class
      // tolerates null values in the map, so getMax must not NPE on them.
      if (temp != null && temp >= max) {
        max = temp;
        maxT = t;
      }
    }
    return maxT;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/naivebayes/LogProbability.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.naivebayes;
/**
 * Class implementing the probability for a label, stored internally as a
 * log probability for numerical precision.
 *
 * @param <T> the label (category) class
 *
 */
public class LogProbability<T> extends Probability<T> {
  public LogProbability(T label) {
    super(label);
    // Initialize to probability 1.0, i.e. a log probability of 0.0.
    set(1.0);
  }
  /**
   * Assigns a probability to a label, discarding any previously assigned probability.
   *
   * @param probability the probability to assign
   */
  public void set(double probability) {
    this.probability = Math.log(probability);
  }
  /**
   * Assigns a probability to a label, discarding any previously assigned probability.
   *
   * @param probability the probability to assign
   */
  public void set(Probability probability) {
    this.probability = probability.getLog();
  }
  /**
   * Assigns a probability to a label, discarding any previously assigned probability,
   * if the new probability is greater than the old one.
   *
   * @param probability the probability to assign
   */
  public void setIfLarger(double probability) {
    double logP = Math.log(probability);
    if (this.probability < logP) {
      this.probability = logP;
    }
  }
  /**
   * Assigns a probability to a label, discarding any previously assigned probability,
   * if the new probability is greater than the old one.
   *
   * @param probability the probability to assign
   */
  public void setIfLarger(Probability probability) {
    if (this.probability < probability.getLog()) {
      this.probability = probability.getLog();
    }
  }
  /**
   * Checks if the given probability is greater than the stored one.
   *
   * @param probability the probability to compare against
   */
  public boolean isLarger(Probability probability) {
    return this.probability < probability.getLog();
  }
  /**
   * Assigns a log probability to a label, discarding any previously assigned probability.
   *
   * @param probability the log probability to assign
   */
  public void setLog(double probability) {
    this.probability = probability;
  }
  /**
   * Compounds the existing probability mass on the label with the new
   * probability passed in to the method.
   *
   * @param probability the probability weight to add
   */
  public void addIn(double probability) {
    // In log space, compounding probabilities is addition.
    setLog(this.probability + Math.log(probability));
  }
  /**
   * Returns the probability associated with a label
   *
   * @return the probability associated with the label
   */
  public Double get() {
    return Math.exp(probability);
  }
  /**
   * Returns the log probability associated with a label
   *
   * @return the log probability associated with the label
   */
  public Double getLog() {
    return probability;
  }
  /**
   * Returns the label this probability is attached to.
   *
   * @return the label
   */
  public T getLabel() {
    return label;
  }
  public String toString() {
    return label.toString() + ":" + probability;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/naivebayes/NaiveBayesEvalParameters.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.naivebayes;
import opennlp.tools.ml.model.Context;
import opennlp.tools.ml.model.EvalParameters;
/**
 * Parameters for the evaluation of a naive bayes classifier, extending the
 * generic EvalParameters with the per-outcome totals and vocabulary size
 * needed for smoothing.
 */
public class NaiveBayesEvalParameters extends EvalParameters {
  // Sum of parameter counts per outcome; indexed by outcome id.
  protected double[] outcomeTotals;
  // Number of distinct predicates, used as the vocabulary size for smoothing.
  protected long vocabulary;
  public NaiveBayesEvalParameters(Context[] params, int numOutcomes,
      double[] outcomeTotals, long vocabulary) {
    super(params, numOutcomes);
    // NOTE(review): the array reference is stored without a defensive copy,
    // matching the conventions of the surrounding model classes.
    this.outcomeTotals = outcomeTotals;
    this.vocabulary = vocabulary;
  }
  public double[] getOutcomeTotals() {
    return outcomeTotals;
  }
  public long getVocabulary() {
    return vocabulary;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/naivebayes/NaiveBayesModel.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.naivebayes;
import java.util.Map;
import opennlp.tools.ml.model.AbstractModel;
import opennlp.tools.ml.model.Context;
import opennlp.tools.ml.model.EvalParameters;
/**
 * Class implementing the multinomial Naive Bayes classifier model.
 */
public class NaiveBayesModel extends AbstractModel {
  // Sum of parameter counts per outcome, indexed by outcome id.
  protected double[] outcomeTotals;
  // Vocabulary size (number of predicates), used for Lidstone smoothing.
  protected long vocabulary;
  NaiveBayesModel(Context[] params, String[] predLabels, Map<String, Context> pmap,
      String[] outcomeNames) {
    super(params, predLabels, pmap, outcomeNames);
    outcomeTotals = initOutcomeTotals(outcomeNames, params);
    this.evalParams = new NaiveBayesEvalParameters(params, outcomeNames.length,
        outcomeTotals, predLabels.length);
    modelType = ModelType.NaiveBayes;
  }
  public NaiveBayesModel(Context[] params, String[] predLabels, String[] outcomeNames) {
    super(params, predLabels, outcomeNames);
    outcomeTotals = initOutcomeTotals(outcomeNames, params);
    this.evalParams = new NaiveBayesEvalParameters(params, outcomeNames.length,
        outcomeTotals, predLabels.length);
    modelType = ModelType.NaiveBayes;
  }
  /**
   * Sums, for each outcome, the parameter values of every predicate context
   * that mentions it. The result is the per-outcome total count used as the
   * denominator when evaluating.
   */
  protected double[] initOutcomeTotals(String[] outcomeNames, Context[] params) {
    double[] outcomeTotals = new double[outcomeNames.length];
    for (int i = 0; i < params.length; ++i) {
      Context context = params[i];
      for (int j = 0; j < context.getOutcomes().length; ++j) {
        int outcome = context.getOutcomes()[j];
        double count = context.getParameters()[j];
        outcomeTotals[outcome] += count;
      }
    }
    return outcomeTotals;
  }
  public double[] eval(String[] context) {
    return eval(context, new double[evalParams.getNumOutcomes()]);
  }
  public double[] eval(String[] context, float[] values) {
    return eval(context, values, new double[evalParams.getNumOutcomes()]);
  }
  public double[] eval(String[] context, double[] probs) {
    return eval(context, null, probs);
  }
  public double[] eval(String[] context, float[] values, double[] outsums) {
    Context[] scontexts = new Context[context.length];
    java.util.Arrays.fill(outsums, 0);
    for (int i = 0; i < context.length; i++) {
      // Predicates unseen at training time map to null and are skipped
      // inside the Context[]-based eval.
      scontexts[i] = pmap.get(context[i]);
    }
    return eval(scontexts, values, outsums, evalParams, true);
  }
  public static double[] eval(int[] context, double[] prior, EvalParameters model) {
    return eval(context, null, prior, model, true);
  }
  /**
   * Core evaluation: accumulates, in log space, the smoothed likelihood of
   * each outcome given the active predicates, multiplies in the outcome
   * priors, and writes the normalized probabilities into {@code prior}.
   * NOTE(review): the {@code normalize} flag is currently unused; the result
   * is always normalized by LogProbabilities.get().
   */
  static double[] eval(Context[] context, float[] values, double[] prior,
      EvalParameters model, boolean normalize) {
    Probabilities<Integer> probabilities = new LogProbabilities<>();
    Context[] params = model.getParams();
    double[] outcomeTotals = model instanceof NaiveBayesEvalParameters
        ? ((NaiveBayesEvalParameters) model).getOutcomeTotals() : new double[prior.length];
    long vocabulary = model instanceof NaiveBayesEvalParameters
        ? ((NaiveBayesEvalParameters) model).getVocabulary() : 0;
    double[] activeParameters;
    int[] activeOutcomes;
    double value = 1;
    for (int ci = 0; ci < context.length; ci++) {
      if (context[ci] != null) {
        Context predParams = context[ci];
        activeOutcomes = predParams.getOutcomes();
        activeParameters = predParams.getParameters();
        if (values != null) {
          value = values[ci];
        }
        int ai = 0;
        // Walk all outcomes in parallel with the (sparse) active-outcome list;
        // ai only advances when the current outcome has a stored parameter.
        // Assumes activeOutcomes is sorted ascending — TODO confirm.
        for (int i = 0; i < outcomeTotals.length && ai < activeOutcomes.length; ++i) {
          int oid = activeOutcomes[ai];
          double numerator = oid == i ? activeParameters[ai++] * value : 0;
          double denominator = outcomeTotals[i];
          probabilities.addIn(i, getProbability(numerator, denominator, vocabulary, true), 1);
        }
      }
    }
    double total = 0;
    for (int i = 0; i < outcomeTotals.length; ++i) {
      total += outcomeTotals[i];
    }
    // Multiply in the outcome prior P(outcome) estimated from the totals.
    for (int i = 0; i < outcomeTotals.length; ++i) {
      double numerator = outcomeTotals[i];
      probabilities.addIn(i, numerator / total, 1);
    }
    for (int i = 0; i < outcomeTotals.length; ++i) {
      prior[i] = probabilities.get(i);
    }
    return prior;
  }
  static double[] eval(int[] context, float[] values, double[] prior,
      EvalParameters model, boolean normalize) {
    Context[] scontexts = new Context[context.length];
    for (int i = 0; i < context.length; i++) {
      scontexts[i] = model.getParams()[context[i]];
    }
    return eval(scontexts, values, prior, model, normalize);
  }
  private static double getProbability(double numerator, double denominator,
      double vocabulary, boolean isSmoothed) {
    if (isSmoothed)
      return getSmoothedProbability(numerator, denominator, vocabulary);
    else if (denominator == 0 || denominator < Double.MIN_VALUE)
      return 0;
    else
      return 1.0 * numerator / denominator;
  }
  private static double getSmoothedProbability(double numerator, double denominator, double vocabulary) {
    final double delta = 0.05; // Lidstone smoothing
    return 1.0 * (numerator + delta) / (denominator + delta * vocabulary);
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/naivebayes/NaiveBayesModelReader.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.naivebayes;
import java.io.File;
import java.io.IOException;
import opennlp.tools.ml.model.AbstractModel;
import opennlp.tools.ml.model.AbstractModelReader;
import opennlp.tools.ml.model.Context;
import opennlp.tools.ml.model.DataReader;
/**
* Abstract parent class for readers of NaiveBayes.
*/
public class NaiveBayesModelReader extends AbstractModelReader {

  /**
   * Creates a reader that loads the model from the given file.
   *
   * @param file the file holding the persisted model
   * @throws IOException if the file cannot be opened for reading
   */
  public NaiveBayesModelReader(File file) throws IOException {
    super(file);
  }

  /**
   * Creates a reader backed by an arbitrary {@link DataReader}.
   *
   * @param dataReader the source of the persisted model data
   */
  public NaiveBayesModelReader(DataReader dataReader) {
    super(dataReader);
  }

  /**
   * Retrieve a model from disk. It assumes that models are saved in the
   * following sequence:
   *
   * <br>NaiveBayes (model type identifier)
   * <br>1. # of parameters (int)
   * <br>2. # of outcomes (int)
   * <br> * list of outcome names (String)
   * <br>3. # of different types of outcome patterns (int)
   * <br> * list of (int int[])
   * <br> [# of predicates for which outcome pattern is true] [outcome pattern]
   * <br>4. # of predicates (int)
   * <br> * list of predicate names (String)
   *
   * <p>If you are creating a reader for a format which won't work with this
   * (perhaps a database or xml file), override this method and ignore the
   * other methods provided in this abstract class.
   *
   * @return The NaiveBayesModel stored in the format and location specified to
   *         this NaiveBayesModelReader (usually via its the constructor).
   */
  public AbstractModel constructModel() throws IOException {
    // NOTE: each call below consumes part of the underlying data source, so
    // the call order must mirror the order in which the model was persisted.
    String[] outcomeNames = getOutcomes();
    int[][] patterns = getOutcomePatterns();
    String[] predicateNames = getPredicates();
    Context[] parameters = getParameters(patterns);
    return new NaiveBayesModel(parameters, predicateNames, outcomeNames);
  }

  /**
   * Reads the model type identifier and warns on stdout when it does not
   * designate a NaiveBayes model. Loading still proceeds after the warning.
   */
  public void checkModelType() throws java.io.IOException {
    String modelType = readUTF();
    if (!modelType.equals("NaiveBayes")) {
      System.out.println("Error: attempting to load a " + modelType +
          " model as a NaiveBayes model." +
          " You should expect problems.");
    }
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/naivebayes/NaiveBayesModelWriter.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.naivebayes;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import opennlp.tools.ml.model.AbstractModel;
import opennlp.tools.ml.model.AbstractModelWriter;
import opennlp.tools.ml.model.ComparablePredicate;
import opennlp.tools.ml.model.Context;
/**
* Abstract parent class for NaiveBayes writers. It provides the persist method
* which takes care of the structure of a stored document, and requires an
* extending class to define precisely how the data should be stored.
*/
public abstract class NaiveBayesModelWriter extends AbstractModelWriter {
  protected Context[] PARAMS;
  protected String[] OUTCOME_LABELS;
  protected String[] PRED_LABELS;
  int numOutcomes;

  /**
   * Extracts the data structures from {@code model} and rebuilds the
   * predicate/parameter arrays so that {@code PARAMS[i]} is always the
   * Context belonging to {@code PRED_LABELS[i]}.
   *
   * @param model the model whose data is to be persisted
   */
  public NaiveBayesModelWriter(AbstractModel model) {
    Object[] data = model.getDataStructures();
    this.numOutcomes = model.getNumOutcomes();
    @SuppressWarnings("unchecked")
    Map<String, Context> pmap = (Map<String, Context>) data[1];
    OUTCOME_LABELS = (String[]) data[2];
    // PARAMS is rebuilt from the predicate map so it stays aligned with
    // PRED_LABELS. (A previous assignment of data[0] to PARAMS was a dead
    // store that was always overwritten here, and has been removed.)
    PARAMS = new Context[pmap.size()];
    PRED_LABELS = new String[pmap.size()];
    int i = 0;
    for (Map.Entry<String, Context> pred : pmap.entrySet()) {
      PRED_LABELS[i] = pred.getKey();
      PARAMS[i] = pred.getValue();
      i++;
    }
  }

  /**
   * Pairs each predicate label with its outcome pattern and parameter values
   * and sorts the result, so that predicates with identical outcome patterns
   * become adjacent and can be written out compactly.
   *
   * @return the predicates sorted by their outcome patterns
   */
  protected ComparablePredicate[] sortValues() {
    ComparablePredicate[] sortPreds = new ComparablePredicate[PARAMS.length];
    for (int pid = 0; pid < PARAMS.length; pid++) {
      sortPreds[pid] = new ComparablePredicate(PRED_LABELS[pid],
          PARAMS[pid].getOutcomes(), PARAMS[pid].getParameters());
    }
    Arrays.sort(sortPreds);
    return sortPreds;
  }

  /**
   * Groups adjacent predicates that share the same outcome pattern.
   * Returns an empty list for empty input.
   *
   * @param sorted predicates sorted by outcome pattern, see {@link #sortValues()}
   * @return one list of predicates per distinct outcome pattern
   */
  protected List<List<ComparablePredicate>> compressOutcomes(ComparablePredicate[] sorted) {
    List<List<ComparablePredicate>> outcomePatterns = new ArrayList<>();
    if (sorted.length > 0) {
      ComparablePredicate cp = sorted[0];
      List<ComparablePredicate> newGroup = new ArrayList<>();
      for (int i = 0; i < sorted.length; i++) {
        if (cp.compareTo(sorted[i]) == 0) {
          newGroup.add(sorted[i]);
        } else {
          cp = sorted[i];
          outcomePatterns.add(newGroup);
          newGroup = new ArrayList<>();
          newGroup.add(sorted[i]);
        }
      }
      outcomePatterns.add(newGroup);
    }
    return outcomePatterns;
  }

  /**
   * Same grouping as {@link #compressOutcomes(ComparablePredicate[])}, but
   * also reports the number of patterns on stderr. This method previously
   * threw an ArrayIndexOutOfBoundsException for an empty input; it now
   * returns an empty list instead.
   *
   * @param sorted predicates sorted by outcome pattern
   * @return one list of predicates per distinct outcome pattern
   */
  protected List<List<ComparablePredicate>> computeOutcomePatterns(ComparablePredicate[] sorted) {
    List<List<ComparablePredicate>> outcomePatterns = new ArrayList<>();
    if (sorted.length == 0) {
      System.err.println(outcomePatterns.size() + " outcome patterns");
      return outcomePatterns;
    }
    ComparablePredicate cp = sorted[0];
    List<ComparablePredicate> newGroup = new ArrayList<>();
    for (ComparablePredicate predicate : sorted) {
      if (cp.compareTo(predicate) == 0) {
        newGroup.add(predicate);
      } else {
        cp = predicate;
        outcomePatterns.add(newGroup);
        newGroup = new ArrayList<>();
        newGroup.add(predicate);
      }
    }
    outcomePatterns.add(newGroup);
    System.err.println(outcomePatterns.size() + " outcome patterns");
    return outcomePatterns;
  }

  /**
   * Writes the model to disk, using the <code>writeX()</code> methods
   * provided by extending classes.
   *
   * <p>If you wish to create a NaiveBayesModelWriter which uses a different
   * structure, it will be necessary to override the persist method in
   * addition to implementing the <code>writeX()</code> methods.
   */
  public void persist() throws IOException {
    // the type of model (NaiveBayes)
    writeUTF("NaiveBayes");
    // the mapping from outcomes to their integer indexes
    writeInt(OUTCOME_LABELS.length);
    for (String label : OUTCOME_LABELS) {
      writeUTF(label);
    }
    // the mapping from predicates to the outcomes they contributed to.
    // The sorting is done so that we actually can write this out more
    // compactly than as the entire list.
    ComparablePredicate[] sorted = sortValues();
    List<List<ComparablePredicate>> compressed = computeOutcomePatterns(sorted);
    writeInt(compressed.size());
    for (List<ComparablePredicate> a : compressed) {
      // every group produced above is non-empty, so a.get(0) is safe
      writeUTF(a.size() + a.get(0).toString());
    }
    // the mapping from predicate names to their integer indexes
    writeInt(sorted.length);
    for (ComparablePredicate s : sorted) {
      writeUTF(s.name);
    }
    // write out the parameters
    for (int i = 0; i < sorted.length; i++)
      for (int j = 0; j < sorted[i].params.length; j++)
        writeDouble(sorted[i].params[j]);
    close();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/naivebayes/NaiveBayesTrainer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.naivebayes;
import java.io.IOException;
import opennlp.tools.ml.AbstractEventTrainer;
import opennlp.tools.ml.model.AbstractModel;
import opennlp.tools.ml.model.DataIndexer;
import opennlp.tools.ml.model.EvalParameters;
import opennlp.tools.ml.model.MutableContext;
import opennlp.tools.util.TrainingParameters;
/**
* Trains models using the combination of EM algorithm and Naive Bayes classifier
* which is described in:
* Text Classification from Labeled and Unlabeled Documents using EM
* Nigam, McCallum, et al paper of 2000
*/
public class NaiveBayesTrainer extends AbstractEventTrainer {
  public static final String NAIVE_BAYES_VALUE = "NAIVEBAYES";
  /**
   * Number of unique events which occurred in the event set.
   */
  private int numUniqueEvents;
  /**
   * Number of events in the event set.
   */
  private int numEvents;
  /**
   * Number of predicates.
   */
  private int numPreds;
  /**
   * Number of outcomes.
   */
  private int numOutcomes;
  /**
   * Records the array of predicates seen in each event.
   */
  private int[][] contexts;
  /**
   * The value associated with each context. If null then context values are assumed to be 1.
   */
  private float[][] values;
  /**
   * List of outcomes for each event i, in context[i].
   */
  private int[] outcomeList;
  /**
   * Records the num of times an event has been seen for each event i, in context[i].
   */
  private int[] numTimesEventsSeen;
  /**
   * Stores the String names of the outcomes. The NaiveBayes only tracks outcomes
   * as ints, and so this array is needed to save the model to disk and
   * thereby allow users to know what the outcome was in human
   * understandable terms.
   */
  private String[] outcomeLabels;
  /**
   * Stores the String names of the predicates. The NaiveBayes only tracks
   * predicates as ints, and so this array is needed to save the model to
   * disk and thereby allow users to know what the outcome was in human
   * understandable terms.
   */
  private String[] predLabels;
  /** Creates a trainer with default training parameters. */
  public NaiveBayesTrainer() {
  }
  /**
   * Creates a trainer configured with the given training parameters.
   *
   * @param parameters the training parameters, forwarded to the superclass
   */
  public NaiveBayesTrainer(TrainingParameters parameters) {
    super(parameters);
  }
  /**
   * Naive Bayes accumulates raw event occurrence counts, so the data
   * indexer is told not to sort and merge duplicate events.
   */
  public boolean isSortAndMerge() {
    return false;
  }
  /**
   * Framework entry point; delegates to {@link #trainModel(DataIndexer)}.
   *
   * @param indexer the indexed training data
   * @return the trained NaiveBayes model
   */
  public AbstractModel doTrain(DataIndexer indexer) throws IOException {
    return this.trainModel(indexer);
  }
  // << members related to AbstractSequenceTrainer
  /**
   * Trains a naive Bayes model: copies the indexer's event, outcome and
   * predicate tables into fields, estimates the per-predicate outcome
   * counts, and wraps the result in a {@link NaiveBayesModel}.
   *
   * @param di the indexed training data
   * @return the trained model
   */
  public AbstractModel trainModel(DataIndexer di) {
    // display(...) is inherited progress logging — presumably from
    // AbstractEventTrainer (not visible here); TODO confirm.
    display("Incorporating indexed data for training... \n");
    contexts = di.getContexts();
    values = di.getValues();
    numTimesEventsSeen = di.getNumTimesEventsSeen();
    numEvents = di.getNumEvents();
    numUniqueEvents = contexts.length;
    outcomeLabels = di.getOutcomeLabels();
    outcomeList = di.getOutcomeList();
    predLabels = di.getPredLabels();
    numPreds = predLabels.length;
    numOutcomes = outcomeLabels.length;
    display("done.\n");
    display("\tNumber of Event Tokens: " + numUniqueEvents + "\n");
    display("\t Number of Outcomes: " + numOutcomes + "\n");
    display("\t Number of Predicates: " + numPreds + "\n");
    display("Computing model parameters...\n");
    MutableContext[] finalParameters = findParameters();
    display("...done.\n");
    /* Create and return the model ****/
    return new NaiveBayesModel(finalParameters, predLabels, outcomeLabels);
  }
  /**
   * Computes the model parameters in a single pass over the events: for
   * every (predicate, target outcome) pair the parameter accumulates how
   * often the predicate occurred in events with that outcome, weighted by
   * the context value when values are present.
   *
   * @return one MutableContext per predicate, covering all outcomes
   */
  private MutableContext[] findParameters() {
    // Every predicate's context spans the full set of outcomes.
    int[] allOutcomesPattern = new int[numOutcomes];
    for (int oi = 0; oi < numOutcomes; oi++)
      allOutcomesPattern[oi] = oi;
    /* Stores the estimated parameter value of each predicate during iteration. */
    MutableContext[] params = new MutableContext[numPreds];
    for (int pi = 0; pi < numPreds; pi++) {
      params[pi] = new MutableContext(allOutcomesPattern, new double[numOutcomes]);
      for (int aoi = 0; aoi < numOutcomes; aoi++)
        params[pi].setParameter(aoi, 0.0);
    }
    EvalParameters evalParams = new EvalParameters(params, numOutcomes);
    double stepSize = 1;
    // Accumulate counts: each occurrence of a predicate in an event adds
    // stepSize (times the context value, if any) to the observed outcome.
    for (int ei = 0; ei < numUniqueEvents; ei++) {
      int targetOutcome = outcomeList[ei];
      for (int ni = 0; ni < this.numTimesEventsSeen[ei]; ni++) {
        for (int ci = 0; ci < contexts[ei].length; ci++) {
          int pi = contexts[ei][ci];
          if (values == null) {
            params[pi].updateParameter(targetOutcome, stepSize);
          } else {
            params[pi].updateParameter(targetOutcome, stepSize * values[ei][ci]);
          }
        }
      }
    }
    // Output the final training stats.
    trainingStats(evalParams);
    return params;
  }
  /**
   * Evaluates the trained parameters on the training data itself and
   * reports the resulting accuracy via display().
   *
   * @param evalParams the parameters to evaluate
   * @return the training-set accuracy (fraction of events whose most
   *         probable outcome matches the observed outcome)
   */
  private double trainingStats(EvalParameters evalParams) {
    int numCorrect = 0;
    for (int ei = 0; ei < numUniqueEvents; ei++) {
      for (int ni = 0; ni < this.numTimesEventsSeen[ei]; ni++) {
        double[] modelDistribution = new double[numOutcomes];
        if (values != null)
          NaiveBayesModel.eval(contexts[ei], values[ei], modelDistribution, evalParams, false);
        else
          NaiveBayesModel.eval(contexts[ei], null, modelDistribution, evalParams, false);
        int max = maxIndex(modelDistribution);
        if (max == outcomeList[ei])
          numCorrect++;
      }
    }
    double trainingAccuracy = (double) numCorrect / numEvents;
    display("Stats: (" + numCorrect + "/" + numEvents + ") " + trainingAccuracy + "\n");
    return trainingAccuracy;
  }
  /**
   * Returns the index of the largest value in the array; ties keep the
   * earliest index. Returns 0 for an empty array.
   *
   * @param values the array to scan
   * @return the index of the maximum element
   */
  private int maxIndex(double[] values) {
    int max = 0;
    for (int i = 1; i < values.length; i++)
      if (values[i] > values[max])
        max = i;
    return max;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/naivebayes/PlainTextNaiveBayesModelReader.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.naivebayes;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import opennlp.tools.ml.model.PlainTextFileDataReader;
/**
 * Reads a NaiveBayes model that was persisted in plain text form,
 * delegating all parsing to {@link NaiveBayesModelReader} via a
 * {@link PlainTextFileDataReader}.
 */
public class PlainTextNaiveBayesModelReader extends NaiveBayesModelReader {
  /**
   * Constructor which directly instantiates the BufferedReader containing
   * the model contents.
   *
   * @param br The BufferedReader containing the model information.
   */
  public PlainTextNaiveBayesModelReader(BufferedReader br) {
    super(new PlainTextFileDataReader(br));
  }
  /**
   * Constructor which takes a File and creates a reader for it. Detects
   * whether the file is gzipped or not based on whether the suffix contains
   * ".gz".
   *
   * @param f The File in which the model is stored.
   * @throws IOException if the file cannot be opened for reading
   */
  public PlainTextNaiveBayesModelReader(File f) throws IOException {
    super(f);
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/naivebayes/PlainTextNaiveBayesModelWriter.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.naivebayes;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.util.zip.GZIPOutputStream;
import opennlp.tools.ml.model.AbstractModel;
/**
* Model writer that saves models in plain text format.
*/
public class PlainTextNaiveBayesModelWriter extends NaiveBayesModelWriter {
  private BufferedWriter output;

  /**
   * Prepares a writer that persists {@code model} to the file {@code f} in
   * plain text, wrapping the output in a GZIP stream when the file name
   * ends in ".gz".
   *
   * @param model The NaiveBayesModel which is to be persisted.
   * @param f The File in which the model is to be persisted.
   * @throws IOException if the file cannot be opened for writing
   */
  public PlainTextNaiveBayesModelWriter(AbstractModel model, File f)
      throws IOException {
    super(model);
    final boolean gzipped = f.getName().endsWith(".gz");
    output = gzipped
        ? new BufferedWriter(new OutputStreamWriter(
            new GZIPOutputStream(new FileOutputStream(f))))
        : new BufferedWriter(new FileWriter(f));
  }

  /**
   * Prepares a writer that persists {@code model} through an existing
   * BufferedWriter.
   *
   * @param model The NaiveBayesModel which is to be persisted.
   * @param bw The BufferedWriter which will be used to persist the model.
   */
  public PlainTextNaiveBayesModelWriter(AbstractModel model, BufferedWriter bw) {
    super(model);
    output = bw;
  }

  /** Writes the string on a line of its own. */
  public void writeUTF(String s) throws java.io.IOException {
    output.write(s);
    output.newLine();
  }

  /** Writes the decimal representation of the int on a line of its own. */
  public void writeInt(int i) throws java.io.IOException {
    output.write(String.valueOf(i));
    output.newLine();
  }

  /** Writes the decimal representation of the double on a line of its own. */
  public void writeDouble(double d) throws java.io.IOException {
    output.write(String.valueOf(d));
    output.newLine();
  }

  /** Flushes any buffered output, then closes the underlying writer. */
  public void close() throws java.io.IOException {
    output.flush();
    output.close();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/naivebayes/Probabilities.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.naivebayes;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
/**
* Class implementing the probability distribution over labels returned by a classifier.
*
* @param <T> the label (category) class
*
*/
public abstract class Probabilities<T> {
  // Raw (unnormalised) probability mass per label.
  protected Map<T, Double> map = new HashMap<>();
  // Cache of the normalised view; invalidated whenever map changes.
  protected transient boolean isNormalised = false;
  protected Map<T, Double> normalised;
  protected double confidence = 0.0;
  /**
   * Assigns a probability to a label, discarding any previously assigned probability.
   *
   * @param t the label to which the probability is being assigned
   * @param probability the probability to assign
   */
  public void set(T t, double probability) {
    isNormalised = false;
    map.put(t, probability);
  }
  /**
   * Assigns a probability to a label, discarding any previously assigned probability.
   *
   * @param t the label to which the probability is being assigned
   * @param probability the probability to assign
   */
  public void set(T t, Probability<T> probability) {
    isNormalised = false;
    map.put(t, probability.get());
  }
  /**
   * Assigns a probability to a label, discarding any previously assigned probability,
   * if the new probability is greater than the old one.
   *
   * @param t the label to which the probability is being assigned
   * @param probability the probability to assign
   */
  public void setIfLarger(T t, double probability) {
    Double p = map.get(t);
    if (p == null || probability > p) {
      isNormalised = false;
      map.put(t, probability);
    }
  }
  /**
   * Assigns a log probability to a label, discarding any previously assigned probability.
   *
   * @param t the label to which the log probability is being assigned
   * @param probability the log probability to assign
   */
  public void setLog(T t, double probability) {
    set(t, Math.exp(probability));
  }
  /**
   * Compounds the existing probability mass on the label with the new probability passed in to the method.
   *
   * @param t the label whose probability mass is being updated
   * @param probability the probability weight to add
   * @param count the amplifying factor for the probability compounding
   */
  public void addIn(T t, double probability, int count) {
    isNormalised = false;
    Double p = map.get(t);
    if (p == null)
      p = 1.0; // multiplicative identity for labels not yet seen
    probability = Math.pow(probability, count);
    map.put(t, p * probability);
  }
  /**
   * Returns the normalised probability associated with a label.
   *
   * @param t the label whose probability needs to be returned
   * @return the probability associated with the label, or 0 if unknown
   */
  public Double get(T t) {
    Double d = normalize().get(t);
    if (d == null)
      return 0.0;
    return d;
  }
  /**
   * Returns the log of the normalised probability associated with a label.
   *
   * @param t the label whose log probability needs to be returned
   * @return the log probability associated with the label
   */
  public Double getLog(T t) {
    return Math.log(get(t));
  }
  /**
   * Returns the set of labels that currently have a probability assigned.
   *
   * @return the set of known labels
   */
  public Set<T> getKeys() {
    return map.keySet();
  }
  /**
   * Returns the normalised probabilities associated with all labels.
   *
   * @return a map from label to normalised probability
   */
  public Map<T, Double> getAll() {
    return normalize();
  }
  private Map<T, Double> normalize() {
    if (isNormalised)
      return normalised;
    Map<T, Double> temp = createMapDataStructure();
    double sum = 0;
    for (Entry<T, Double> entry : map.entrySet()) {
      Double p = entry.getValue();
      if (p != null) {
        sum += p;
      }
    }
    // BUG FIX: the second loop previously iterated over the freshly created
    // (empty) target map, so the normalised map was always empty and get()
    // returned 0 for every label. Iterate the source map instead, writing
    // the scaled values into the target.
    for (Entry<T, Double> entry : map.entrySet()) {
      Double p = entry.getValue();
      if (p != null) {
        // guard against a zero total mass to avoid NaN from 0/0
        temp.put(entry.getKey(), sum == 0 ? 0.0 : p / sum);
      }
    }
    normalised = temp;
    isNormalised = true;
    return temp;
  }
  // Extension hook: subclasses may supply a different Map implementation
  // to hold the normalised probabilities.
  protected Map<T, Double> createMapDataStructure() {
    return new HashMap<>();
  }
  /**
   * Returns the most likely label.
   * Ties are broken by the (unspecified) iteration order of the backing map.
   *
   * @return the label that has the highest associated probability,
   *         or null if no labels are known
   */
  public T getMax() {
    double max = 0;
    T maxT = null;
    for (Entry<T, Double> entry : map.entrySet()) {
      final T t = entry.getKey();
      final Double temp = entry.getValue();
      if (temp >= max) {
        max = temp;
        maxT = t;
      }
    }
    return maxT;
  }
  /**
   * Returns the probability of the most likely label.
   *
   * @return the highest normalised probability
   */
  public double getMaxValue() {
    return get(getMax());
  }
  /**
   * Removes every label whose raw (unnormalised) mass is below the
   * given threshold.
   *
   * @param i the threshold below which labels are discarded
   */
  public void discardCountsBelow(double i) {
    List<T> labelsToRemove = new ArrayList<>();
    for (Entry<T, Double> entry : map.entrySet()) {
      T label = entry.getKey();
      Double sum = entry.getValue();
      if (sum == null) sum = 0.0;
      if (sum < i)
        labelsToRemove.add(label);
    }
    for (T label : labelsToRemove) {
      map.remove(label);
    }
  }
  /**
   * Returns the best confidence with which this set of probabilities has been calculated.
   * This is a function of the amount of data that supports the assertion.
   * It is also a measure of the accuracy of the estimator of the probability.
   *
   * @return the best confidence of the probabilities
   */
  public double getConfidence() {
    return confidence;
  }
  /**
   * Sets the best confidence with which this set of probabilities has been calculated.
   * This is a function of the amount of data that supports the assertion.
   * It is also a measure of the accuracy of the estimator of the probability.
   *
   * @param confidence the confidence in the probabilities
   */
  public void setConfidence(double confidence) {
    this.confidence = confidence;
  }
  @Override
  public String toString() {
    return getAll().toString();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/naivebayes/Probability.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.naivebayes;
/**
* Class implementing the probability for a label.
*
* @param <T> the label (category) class
*
*/
public class Probability<T> {
  protected T label;
  protected double probability = 1.0; // starts at the multiplicative identity

  /**
   * Creates a probability holder for the given label, initialised to 1.0.
   *
   * @param label the label this probability belongs to (may be null)
   */
  public Probability(T label) {
    this.label = label;
  }
  /**
   * Assigns a probability to a label, discarding any previously assigned probability.
   *
   * @param probability the probability to assign
   */
  public void set(double probability) {
    this.probability = probability;
  }
  /**
   * Assigns a probability to a label, discarding any previously assigned probability.
   *
   * @param probability the probability to assign
   */
  public void set(Probability<T> probability) {
    this.probability = probability.get();
  }
  /**
   * Assigns a probability to a label, discarding any previously assigned probability,
   * if the new probability is greater than the old one.
   *
   * @param probability the probability to assign
   */
  public void setIfLarger(double probability) {
    if (this.probability < probability) {
      this.probability = probability;
    }
  }
  /**
   * Assigns a probability to a label, discarding any previously assigned probability,
   * if the new probability is greater than the old one.
   *
   * @param probability the probability to assign
   */
  public void setIfLarger(Probability<T> probability) {
    if (this.probability < probability.get()) {
      this.probability = probability.get();
    }
  }
  /**
   * Checks if the given probability is greater than this one.
   *
   * @param probability the probability to compare against
   * @return true if the argument's probability is strictly larger than this one
   */
  public boolean isLarger(Probability<T> probability) {
    return this.probability < probability.get();
  }
  /**
   * Assigns a log probability to a label, discarding any previously assigned probability.
   *
   * @param probability the log probability to assign
   */
  public void setLog(double probability) {
    set(Math.exp(probability));
  }
  /**
   * Compounds the existing probability mass on the label with the new probability passed in to the method.
   *
   * @param probability the probability weight to add
   */
  public void addIn(double probability) {
    set(this.probability * probability);
  }
  /**
   * Returns the probability associated with the label.
   *
   * @return the probability associated with the label
   */
  public Double get() {
    return probability;
  }
  /**
   * Returns the log probability associated with the label.
   *
   * @return the log probability associated with the label
   */
  public Double getLog() {
    return Math.log(get());
  }
  /**
   * Returns the label this probability belongs to.
   *
   * @return the label, or null if none was supplied
   */
  public T getLabel() {
    return label;
  }
  @Override
  public String toString() {
    return label == null ? "" + probability : label.toString() + ":" + probability;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/perceptron/BinaryPerceptronModelReader.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.perceptron;
import java.io.DataInputStream;
import java.io.File;
import java.io.IOException;
import opennlp.tools.ml.model.BinaryFileDataReader;
/**
 * Reads a perceptron model from binary data, delegating all parsing to
 * {@link PerceptronModelReader} via a {@link BinaryFileDataReader}.
 */
public class BinaryPerceptronModelReader extends PerceptronModelReader {
  /**
   * Constructor which directly instantiates the DataInputStream containing
   * the model contents.
   *
   * @param dis The DataInputStream containing the model information.
   */
  public BinaryPerceptronModelReader(DataInputStream dis) {
    super(new BinaryFileDataReader(dis));
  }
  /**
   * Constructor which takes a File and creates a reader for it. Detects
   * whether the file is gzipped or not based on whether the suffix contains
   * ".gz"
   *
   * @param f The File in which the model is stored.
   * @throws IOException if the file cannot be opened for reading
   */
  public BinaryPerceptronModelReader(File f) throws IOException {
    super(f);
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/perceptron/BinaryPerceptronModelWriter.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.perceptron;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.zip.GZIPOutputStream;
import ai.idylnlp.opennlp.custom.EncryptedDataOutputStream;
import opennlp.tools.ml.model.AbstractModel;
/**
* Model writer that saves models in binary format.
*/
public class BinaryPerceptronModelWriter extends PerceptronModelWriter {
  private EncryptedDataOutputStream output;

  /**
   * Prepares a writer that persists {@code model} to the file {@code f} in
   * binary form, wrapping the output in a GZIP stream when the file name
   * ends in ".gz".
   *
   * @param model The GISModel which is to be persisted.
   * @param f The File in which the model is to be persisted.
   * @throws IOException if the file cannot be opened for writing
   */
  public BinaryPerceptronModelWriter(AbstractModel model, File f) throws IOException {
    super(model);
    final FileOutputStream fileOut = new FileOutputStream(f);
    output = f.getName().endsWith(".gz")
        ? new EncryptedDataOutputStream(new GZIPOutputStream(fileOut))
        : new EncryptedDataOutputStream(fileOut);
  }

  /**
   * Prepares a writer that persists {@code model} through an existing
   * output stream.
   *
   * @param model The GISModel which is to be persisted.
   * @param dos The stream which will be used to persist the model.
   */
  public BinaryPerceptronModelWriter(AbstractModel model, EncryptedDataOutputStream dos) {
    super(model);
    output = dos;
  }

  /** Writes the string through the stream's encrypted-UTF encoding. */
  public void writeUTF(String s) throws java.io.IOException {
    output.writeEncryptedUTF(s);
  }

  /** Writes the int in the stream's binary encoding. */
  public void writeInt(int i) throws java.io.IOException {
    output.writeInt(i);
  }

  /** Writes the double in the stream's binary encoding. */
  public void writeDouble(double d) throws java.io.IOException {
    output.writeDouble(d);
  }

  /** Flushes any buffered output, then closes the underlying stream. */
  public void close() throws java.io.IOException {
    output.flush();
    output.close();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/perceptron/PerceptronModel.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.perceptron;
import opennlp.tools.ml.model.AbstractModel;
import opennlp.tools.ml.model.Context;
import opennlp.tools.ml.model.EvalParameters;
/**
 * A perceptron classification model: scores are linear sums of the active
 * predicates' per-outcome weights, optionally normalized via a scaled softmax.
 */
public class PerceptronModel extends AbstractModel {

  /**
   * Creates a perceptron model from its parameters.
   *
   * @param params per-predicate parameter contexts
   * @param predLabels predicate names, parallel to {@code params}
   * @param outcomeNames outcome names
   */
  public PerceptronModel(Context[] params, String[] predLabels, String[] outcomeNames) {
    super(params, predLabels, outcomeNames);
    modelType = ModelType.Perceptron;
  }

  /** Evaluates the given context, allocating a fresh outcome array. */
  public double[] eval(String[] context) {
    return eval(context, new double[evalParams.getNumOutcomes()]);
  }

  /** Evaluates the given context with per-feature values. */
  public double[] eval(String[] context, float[] values) {
    return eval(context, values, new double[evalParams.getNumOutcomes()]);
  }

  /** Evaluates the given context, writing scores into {@code probs}. */
  public double[] eval(String[] context, double[] probs) {
    return eval(context, null, probs);
  }

  /**
   * Evaluates the given string context.
   *
   * @param context predicate names; unknown predicates map to null and are skipped
   * @param values per-feature values parallel to {@code context}, or null for 1.0
   * @param outsums output array, overwritten with normalized outcome scores
   * @return {@code outsums}
   */
  public double[] eval(String[] context, float[] values, double[] outsums) {
    Context[] scontexts = new Context[context.length];
    java.util.Arrays.fill(outsums, 0);
    for (int i = 0; i < context.length; i++) {
      // pmap lookup returns null for predicates unseen in training.
      scontexts[i] = pmap.get(context[i]);
    }
    return eval(scontexts, values, outsums, evalParams, true);
  }

  /** Evaluates integer-indexed predicates with normalization. */
  public static double[] eval(int[] context, double[] prior, EvalParameters model) {
    return eval(context, null, prior, model, true);
  }

  /**
   * Evaluates integer-indexed predicates.
   *
   * @param context predicate indexes into the model's parameter array
   * @param values per-feature values, or null for 1.0
   * @param prior accumulator for outcome scores (added to, not cleared)
   * @param model the evaluation parameters
   * @param normalize whether to apply the scaled softmax normalization
   */
  static double[] eval(int[] context, float[] values, double[] prior, EvalParameters model,
      boolean normalize) {
    // Hoist the parameter array lookup out of the loop; it is invariant.
    Context[] params = model.getParams();
    Context[] scontexts = new Context[context.length];
    for (int i = 0; i < context.length; i++) {
      scontexts[i] = params[context[i]];
    }
    return eval(scontexts, values, prior, model, normalize);
  }

  /**
   * Core evaluation: accumulates each non-null predicate's active outcome
   * weights (scaled by the feature value) into {@code prior}, then optionally
   * normalizes with exp(score / maxScore) followed by a sum-to-one division.
   * The division by the maximum magnitude guards exp() against overflow.
   */
  static double[] eval(Context[] context, float[] values, double[] prior, EvalParameters model,
      boolean normalize) {
    double[] activeParameters;
    int[] activeOutcomes;
    double value = 1;
    for (int ci = 0; ci < context.length; ci++) {
      if (context[ci] != null) {
        Context predParams = context[ci];
        activeOutcomes = predParams.getOutcomes();
        activeParameters = predParams.getParameters();
        if (values != null) {
          value = values[ci];
        }
        for (int ai = 0; ai < activeOutcomes.length; ai++) {
          int oid = activeOutcomes[ai];
          prior[oid] += activeParameters[ai] * value;
        }
      }
    }
    if (normalize) {
      int numOutcomes = model.getNumOutcomes();
      // Scale by the largest magnitude (at least 1) so exp() cannot overflow.
      double maxPrior = 1;
      for (int oid = 0; oid < numOutcomes; oid++) {
        maxPrior = Math.max(maxPrior, Math.abs(prior[oid]));
      }
      double normal = 0.0;
      for (int oid = 0; oid < numOutcomes; oid++) {
        prior[oid] = Math.exp(prior[oid] / maxPrior);
        normal += prior[oid];
      }
      for (int oid = 0; oid < numOutcomes; oid++) {
        prior[oid] /= normal;
      }
    }
    return prior;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/perceptron/PerceptronModelReader.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.perceptron;
import java.io.File;
import java.io.IOException;
import opennlp.tools.ml.model.AbstractModel;
import opennlp.tools.ml.model.AbstractModelReader;
import opennlp.tools.ml.model.Context;
import opennlp.tools.ml.model.DataReader;
/**
 * Reader for Perceptron models persisted by {@code PerceptronModelWriter}.
 */
public class PerceptronModelReader extends AbstractModelReader {

  /**
   * Creates a reader for a model stored in the given file.
   *
   * @param file the file containing the persisted model
   * @throws IOException if the file cannot be opened
   */
  public PerceptronModelReader(File file) throws IOException {
    super(file);
  }

  /**
   * Creates a reader backed by an arbitrary {@link DataReader}.
   *
   * @param dataReader the source of the persisted model data
   */
  public PerceptronModelReader(DataReader dataReader) {
    super(dataReader);
  }

  /**
   * Retrieve a model from disk. It assumes that models are saved in the
   * following sequence:
   *
   * <br>Perceptron (model type identifier)
   * <br>1. # of parameters (int)
   * <br>2. # of outcomes (int)
   * <br>  * list of outcome names (String)
   * <br>3. # of different types of outcome patterns (int)
   * <br>  * list of (int int[])
   * <br>    [# of predicates for which outcome pattern is true] [outcome pattern]
   * <br>4. # of predicates (int)
   * <br>  * list of predicate names (String)
   *
   * <p>If you are creating a reader for a format which won't work with this
   * (perhaps a database or xml file), override this method and ignore the
   * other methods provided in this abstract class.
   *
   * @return The PerceptronModel stored in the format and location specified to
   *         this PerceptronModelReader (usually via its the constructor).
   */
  public AbstractModel constructModel() throws IOException {
    String[] outcomeLabels = getOutcomes();
    int[][] outcomePatterns = getOutcomePatterns();
    String[] predLabels = getPredicates();
    Context[] params = getParameters(outcomePatterns);
    return new PerceptronModel(params, predLabels, outcomeLabels);
  }

  /**
   * Verifies the persisted model-type identifier.
   *
   * @throws IOException if the stream holds a different model type. The
   *         previous behavior printed a warning to stdout and continued,
   *         which let mismatched loads proceed and fail unpredictably later;
   *         failing fast here surfaces the problem at its source.
   */
  public void checkModelType() throws IOException {
    String modelType = readUTF();
    if (!modelType.equals("Perceptron")) {
      throw new IOException("Attempting to load a " + modelType
          + " model as a Perceptron model.");
    }
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/perceptron/PerceptronModelWriter.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.perceptron;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import opennlp.tools.ml.model.AbstractModel;
import opennlp.tools.ml.model.AbstractModelWriter;
import opennlp.tools.ml.model.ComparablePredicate;
import opennlp.tools.ml.model.Context;
/**
 * Abstract parent class for Perceptron writers. It provides the persist method
 * which takes care of the structure of a stored document, and requires an
 * extending class to define precisely how the data should be stored.
 */
public abstract class PerceptronModelWriter extends AbstractModelWriter {

  /** Parameter contexts, parallel to {@link #PRED_LABELS}. */
  protected Context[] PARAMS;
  /** Outcome names, indexed by outcome id. */
  protected String[] OUTCOME_LABELS;
  /** Predicate names, parallel to {@link #PARAMS}. */
  protected String[] PRED_LABELS;
  private int numOutcomes;

  /**
   * Extracts the model's data structures into the parallel arrays used by
   * {@link #persist()}.
   *
   * @param model the model whose data is to be written
   */
  public PerceptronModelWriter(AbstractModel model) {
    Object[] data = model.getDataStructures();
    this.numOutcomes = model.getNumOutcomes();
    // data[1] maps predicate names to their Contexts and supplies both the
    // labels and the parameters; the raw Context[] in data[0] is not needed.
    // (The previous code assigned PARAMS from data[0] only to immediately
    // overwrite it below — a dead store, now removed.)
    @SuppressWarnings("unchecked")
    Map<String, Context> pmap = (Map<String, Context>) data[1];
    OUTCOME_LABELS = (String[]) data[2];
    PARAMS = new Context[pmap.size()];
    PRED_LABELS = new String[pmap.size()];
    int i = 0;
    for (Map.Entry<String, Context> pred : pmap.entrySet()) {
      PRED_LABELS[i] = pred.getKey();
      PARAMS[i] = pred.getValue();
      i++;
    }
  }

  /**
   * Drops zero-weight parameters and parameterless predicates, then returns
   * the surviving predicates sorted so that identical outcome patterns are
   * adjacent (which lets {@link #computeOutcomePatterns} group them).
   */
  protected ComparablePredicate[] sortValues() {
    ComparablePredicate[] sortPreds;
    ComparablePredicate[] tmpPreds = new ComparablePredicate[PARAMS.length];
    int[] tmpOutcomes = new int[numOutcomes];
    double[] tmpParams = new double[numOutcomes];
    int numPreds = 0;
    //remove parameters with 0 weight and predicates with no parameters
    for (int pid = 0; pid < PARAMS.length; pid++) {
      int numParams = 0;
      double[] predParams = PARAMS[pid].getParameters();
      int[] outcomePattern = PARAMS[pid].getOutcomes();
      for (int pi = 0; pi < predParams.length; pi++) {
        if (predParams[pi] != 0d) {
          tmpOutcomes[numParams] = outcomePattern[pi];
          tmpParams[numParams] = predParams[pi];
          numParams++;
        }
      }
      // Trim the scratch arrays down to the surviving entries.
      int[] activeOutcomes = Arrays.copyOf(tmpOutcomes, numParams);
      double[] activeParams = Arrays.copyOf(tmpParams, numParams);
      if (numParams != 0) {
        tmpPreds[numPreds] = new ComparablePredicate(PRED_LABELS[pid],activeOutcomes,activeParams);
        numPreds++;
      }
    }
    System.err.println("Compressed " + PARAMS.length + " parameters to " + numPreds);
    sortPreds = new ComparablePredicate[numPreds];
    System.arraycopy(tmpPreds, 0, sortPreds, 0, numPreds);
    Arrays.sort(sortPreds);
    return sortPreds;
  }

  /**
   * Groups consecutive predicates that share an outcome pattern.
   * NOTE(review): assumes {@code sorted} is non-empty; an all-zero model
   * would make sortValues() return an empty array and this would throw.
   */
  protected List<List<ComparablePredicate>> computeOutcomePatterns(ComparablePredicate[] sorted) {
    ComparablePredicate cp = sorted[0];
    List<List<ComparablePredicate>> outcomePatterns = new ArrayList<>();
    List<ComparablePredicate> newGroup = new ArrayList<>();
    for (ComparablePredicate predicate : sorted) {
      if (cp.compareTo(predicate) == 0) {
        newGroup.add(predicate);
      } else {
        // Pattern changed: close the current group and start a new one.
        cp = predicate;
        outcomePatterns.add(newGroup);
        newGroup = new ArrayList<>();
        newGroup.add(predicate);
      }
    }
    outcomePatterns.add(newGroup);
    System.err.println(outcomePatterns.size() + " outcome patterns");
    return outcomePatterns;
  }

  /**
   * Writes the model to disk, using the <code>writeX()</code> methods
   * provided by extending classes.
   *
   * <p>If you wish to create a PerceptronModelWriter which uses a different
   * structure, it will be necessary to override the persist method in
   * addition to implementing the <code>writeX()</code> methods.
   */
  public void persist() throws IOException {
    // the type of model (Perceptron)
    writeUTF("Perceptron");
    // the mapping from outcomes to their integer indexes
    writeInt(OUTCOME_LABELS.length);
    for (String label : OUTCOME_LABELS) {
      writeUTF(label);
    }
    // the mapping from predicates to the outcomes they contributed to.
    // The sorting is done so that we actually can write this out more
    // compactly than as the entire list.
    ComparablePredicate[] sorted = sortValues();
    List<List<ComparablePredicate>> compressed = computeOutcomePatterns(sorted);
    writeInt(compressed.size());
    for (List<ComparablePredicate> a : compressed) {
      // Each record is the group size prefixed to the shared outcome pattern.
      writeUTF(a.size() + a.get(0).toString());
    }
    // the mapping from predicate names to their integer indexes
    writeInt(sorted.length);
    for (ComparablePredicate s : sorted) {
      writeUTF(s.name);
    }
    // write out the parameters
    for (int i = 0; i < sorted.length; i++)
      for (int j = 0; j < sorted[i].params.length; j++)
        writeDouble(sorted[i].params[j]);
    close();
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/perceptron/PerceptronTrainer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.perceptron;
import java.io.IOException;
import opennlp.tools.ml.AbstractEventTrainer;
import opennlp.tools.ml.model.AbstractModel;
import opennlp.tools.ml.model.DataIndexer;
import opennlp.tools.ml.model.EvalParameters;
import opennlp.tools.ml.model.MutableContext;
import opennlp.tools.util.TrainingParameters;
/**
 * Trains models using the perceptron algorithm. Each outcome is represented as
 * a binary perceptron classifier. This supports standard (integer) weighting as well
 * average weighting as described in:
 * Discriminative Training Methods for Hidden Markov Models: Theory and Experiments
 * with the Perceptron Algorithm. Michael Collins, EMNLP 2002.
 */
public class PerceptronTrainer extends AbstractEventTrainer {

  public static final String PERCEPTRON_VALUE = "PERCEPTRON";
  /** Default stopping tolerance on the change in training-set accuracy. */
  public static final double TOLERANCE_DEFAULT = .00001;

  /** Number of unique events which occurred in the event set. */
  private int numUniqueEvents;
  /** Number of events in the event set. */
  private int numEvents;
  /** Number of predicates. */
  private int numPreds;
  /** Number of outcomes. */
  private int numOutcomes;
  /** Records the array of predicates seen in each event. */
  private int[][] contexts;
  /** The value associated with each context. If null then context values are assumed to be 1. */
  private float[][] values;
  /** List of outcomes for each event i, in context[i]. */
  private int[] outcomeList;
  /** Records the num of times an event has been seen for each event i, in context[i]. */
  private int[] numTimesEventsSeen;
  /** Stores the String names of the outcomes. The model only tracks outcomes
      as ints, and so this array is needed to save the model to disk and
      thereby allow users to know what the outcome was in human
      understandable terms. */
  private String[] outcomeLabels;
  /** Stores the String names of the predicates. The model only tracks
      predicates as ints, and so this array is needed to save the model to
      disk and thereby allow users to know what the outcome was in human
      understandable terms. */
  private String[] predLabels;

  /** Stop iterating once the accuracy change falls below this value. */
  private double tolerance = TOLERANCE_DEFAULT;
  /** Optional per-iteration step size decrease (percent); null disables it. */
  private Double stepSizeDecrease;
  /** When true, only sum parameters on early or perfect-square iterations. */
  private boolean useSkippedAveraging;

  public PerceptronTrainer() {
  }

  public PerceptronTrainer(TrainingParameters parameters) {
    super(parameters);
  }

  @Override
  public void validate() {
    super.validate();
    String algorithmName = getAlgorithm();
    if (algorithmName != null && !PERCEPTRON_VALUE.equals(algorithmName)) {
      throw new IllegalArgumentException("algorithmName must be PERCEPTRON");
    }
  }

  @Deprecated
  @Override
  public boolean isValid() {
    if (!super.isValid()) {
      return false;
    }
    String algorithmName = getAlgorithm();
    // A null algorithm name means "use the default", which is acceptable.
    return algorithmName == null || PERCEPTRON_VALUE.equals(algorithmName);
  }

  public boolean isSortAndMerge() {
    return false;
  }

  /**
   * Reads training options from the parameters and runs the training loop.
   *
   * @param indexer the indexed training data
   * @return the trained perceptron model
   */
  public AbstractModel doTrain(DataIndexer indexer) throws IOException {
    int iterations = getIterations();
    int cutoff = getCutoff();
    boolean useAverage = trainingParameters.getBooleanParameter("UseAverage", true);
    boolean useSkippedAveraging = trainingParameters.getBooleanParameter("UseSkippedAveraging", false);
    // Skipped averaging is a variant of averaging, so it forces averaging on;
    // overwrite otherwise it might not work.
    if (useSkippedAveraging)
      useAverage = true;
    double stepSizeDecrease = trainingParameters.getDoubleParameter("StepSizeDecrease", 0);
    double tolerance = trainingParameters.getDoubleParameter("Tolerance",
        PerceptronTrainer.TOLERANCE_DEFAULT);
    this.setSkippedAveraging(useSkippedAveraging);
    if (stepSizeDecrease > 0)
      this.setStepSizeDecrease(stepSizeDecrease);
    this.setTolerance(tolerance);
    return this.trainModel(iterations, indexer, cutoff, useAverage);
  }

  // << members related to AbstractSequenceTrainer
  /**
   * Specifies the tolerance. If the change in training set accuracy
   * is less than this, stop iterating.
   *
   * @param tolerance the non-negative stopping tolerance
   * @throws IllegalArgumentException if {@code tolerance} is negative
   */
  public void setTolerance(double tolerance) {
    if (tolerance < 0) {
      throw new
          IllegalArgumentException("tolerance must be a positive number but is " + tolerance + "!");
    }
    this.tolerance = tolerance;
  }

  /**
   * Enables and sets step size decrease. The step size is
   * decreased every iteration by the specified value.
   *
   * @param decrease - step size decrease in percent
   * @throws IllegalArgumentException if {@code decrease} is outside [0, 100]
   */
  public void setStepSizeDecrease(double decrease) {
    if (decrease < 0 || decrease > 100) {
      throw new
          IllegalArgumentException("decrease must be between 0 and 100 but is " + decrease + "!");
    }
    stepSizeDecrease = decrease;
  }

  /**
   * Enables skipped averaging, this flag changes the standard
   * averaging to special averaging instead.
   * <p>
   * If we are doing averaging, and the current iteration is one
   * of the first 20 or it is a perfect square, then updated the
   * summed parameters.
   * <p>
   * The reason we don't take all of them is that the parameters change
   * less toward the end of training, so they drown out the contributions
   * of the more volatile early iterations. The use of perfect
   * squares allows us to sample from successively farther apart iterations.
   *
   * @param averaging averaging flag
   */
  public void setSkippedAveraging(boolean averaging) {
    useSkippedAveraging = averaging;
  }

  public AbstractModel trainModel(int iterations, DataIndexer di, int cutoff) {
    return trainModel(iterations,di,cutoff,true);
  }

  /**
   * Trains a perceptron model from the indexed data.
   *
   * @param iterations maximum number of passes over the data
   * @param di the indexed training data
   * @param cutoff unused here; cutoff is applied during indexing
   * @param useAverage whether to return averaged parameters
   */
  public AbstractModel trainModel(int iterations, DataIndexer di, int cutoff, boolean useAverage) {
    display("Incorporating indexed data for training... \n");
    contexts = di.getContexts();
    values = di.getValues();
    numTimesEventsSeen = di.getNumTimesEventsSeen();
    numEvents = di.getNumEvents();
    numUniqueEvents = contexts.length;
    outcomeLabels = di.getOutcomeLabels();
    outcomeList = di.getOutcomeList();
    predLabels = di.getPredLabels();
    numPreds = predLabels.length;
    numOutcomes = outcomeLabels.length;
    display("done.\n");
    display("\tNumber of Event Tokens: " + numUniqueEvents + "\n");
    display("\t Number of Outcomes: " + numOutcomes + "\n");
    display("\t Number of Predicates: " + numPreds + "\n");
    display("Computing model parameters...\n");
    MutableContext[] finalParameters = findParameters(iterations, useAverage);
    display("...done.\n");
    /* Create and return the model *************/
    return new PerceptronModel(finalParameters, predLabels, outcomeLabels);
  }

  /**
   * Core training loop: runs up to {@code iterations} perceptron passes,
   * optionally accumulating summed parameters for averaging, and stops early
   * when training accuracy stabilizes within {@link #tolerance}.
   */
  private MutableContext[] findParameters(int iterations, boolean useAverage) {
    display("Performing " + iterations + " iterations.\n");
    int[] allOutcomesPattern = new int[numOutcomes];
    for (int oi = 0; oi < numOutcomes; oi++)
      allOutcomesPattern[oi] = oi;
    /* Stores the estimated parameter value of each predicate during iteration. */
    MutableContext[] params = new MutableContext[numPreds];
    for (int pi = 0; pi < numPreds; pi++) {
      params[pi] = new MutableContext(allOutcomesPattern,new double[numOutcomes]);
      for (int aoi = 0; aoi < numOutcomes; aoi++)
        params[pi].setParameter(aoi, 0.0);
    }
    EvalParameters evalParams = new EvalParameters(params, numOutcomes);
    /* Stores the sum of parameter values of each predicate over many iterations. */
    MutableContext[] summedParams = new MutableContext[numPreds];
    if (useAverage) {
      for (int pi = 0; pi < numPreds; pi++) {
        summedParams[pi] = new MutableContext(allOutcomesPattern,new double[numOutcomes]);
        for (int aoi = 0; aoi < numOutcomes; aoi++)
          summedParams[pi].setParameter(aoi, 0.0);
      }
    }
    // Keep track of the previous three accuracies. The difference of
    // the mean of these and the current training set accuracy is used
    // with tolerance to decide whether to stop.
    double prevAccuracy1 = 0.0;
    double prevAccuracy2 = 0.0;
    double prevAccuracy3 = 0.0;
    // A counter for the denominator for averaging.
    int numTimesSummed = 0;
    double stepsize = 1;
    for (int i = 1; i <= iterations; i++) {
      // Decrease the stepsize by a small amount.
      if (stepSizeDecrease != null)
        stepsize *= 1 - stepSizeDecrease;
      displayIteration(i);
      int numCorrect = 0;
      for (int ei = 0; ei < numUniqueEvents; ei++) {
        int targetOutcome = outcomeList[ei];
        for (int ni = 0; ni < this.numTimesEventsSeen[ei]; ni++) {
          // Compute the model's prediction according to the current parameters.
          double[] modelDistribution = new double[numOutcomes];
          if (values != null)
            PerceptronModel.eval(contexts[ei], values[ei], modelDistribution, evalParams, false);
          else
            PerceptronModel.eval(contexts[ei], null, modelDistribution, evalParams, false);
          int maxOutcome = maxIndex(modelDistribution);
          // If the predicted outcome is different from the target
          // outcome, do the standard update: boost the parameters
          // associated with the target and reduce those associated
          // with the incorrect predicted outcome.
          if (maxOutcome != targetOutcome) {
            for (int ci = 0; ci < contexts[ei].length; ci++) {
              int pi = contexts[ei][ci];
              if (values == null) {
                params[pi].updateParameter(targetOutcome, stepsize);
                params[pi].updateParameter(maxOutcome, -stepsize);
              } else {
                params[pi].updateParameter(targetOutcome, stepsize * values[ei][ci]);
                params[pi].updateParameter(maxOutcome, -stepsize * values[ei][ci]);
              }
            }
          }
          // Update the counts for accuracy.
          if (maxOutcome == targetOutcome)
            numCorrect++;
        }
      }
      // Calculate the training accuracy and display.
      double trainingAccuracy = (double) numCorrect / numEvents;
      if (i < 10 || (i % 10) == 0)
        display(". (" + numCorrect + "/" + numEvents + ") " + trainingAccuracy + "\n");
      // With plain averaging, sum on every iteration. With skipped averaging,
      // only sum during the first 20 iterations or on perfect-square ones
      // (see setSkippedAveraging). The previous expression
      //   useAverage && useSkippedAveraging && (...) || useAverage
      // reduced to plain useAverage, so skipped averaging never skipped.
      boolean doAveraging = useAverage
          && (!useSkippedAveraging || i < 20 || isPerfectSquare(i));
      if (doAveraging) {
        numTimesSummed++;
        for (int pi = 0; pi < numPreds; pi++)
          for (int aoi = 0; aoi < numOutcomes; aoi++)
            summedParams[pi].updateParameter(aoi, params[pi].getParameters()[aoi]);
      }
      // If the tolerance is greater than the difference between the
      // current training accuracy and all of the previous three
      // training accuracies, stop training.
      if (Math.abs(prevAccuracy1 - trainingAccuracy) < tolerance
          && Math.abs(prevAccuracy2 - trainingAccuracy) < tolerance
          && Math.abs(prevAccuracy3 - trainingAccuracy) < tolerance) {
        display("Stopping: change in training set accuracy less than " + tolerance + "\n");
        break;
      }
      // Update the previous training accuracies.
      prevAccuracy1 = prevAccuracy2;
      prevAccuracy2 = prevAccuracy3;
      prevAccuracy3 = trainingAccuracy;
    }
    // Output the final training stats.
    trainingStats(evalParams);
    // Create averaged parameters
    if (useAverage) {
      for (int pi = 0; pi < numPreds; pi++)
        for (int aoi = 0; aoi < numOutcomes; aoi++)
          summedParams[pi].setParameter(aoi, summedParams[pi].getParameters()[aoi] / numTimesSummed);
      return summedParams;
    } else {
      return params;
    }
  }

  /** Computes and displays training-set accuracy for the given parameters. */
  private double trainingStats(EvalParameters evalParams) {
    int numCorrect = 0;
    for (int ei = 0; ei < numUniqueEvents; ei++) {
      for (int ni = 0; ni < this.numTimesEventsSeen[ei]; ni++) {
        double[] modelDistribution = new double[numOutcomes];
        if (values != null)
          PerceptronModel.eval(contexts[ei], values[ei], modelDistribution, evalParams,false);
        else
          PerceptronModel.eval(contexts[ei], null, modelDistribution, evalParams, false);
        int max = maxIndex(modelDistribution);
        if (max == outcomeList[ei])
          numCorrect++;
      }
    }
    double trainingAccuracy = (double) numCorrect / numEvents;
    display("Stats: (" + numCorrect + "/" + numEvents + ") " + trainingAccuracy + "\n");
    return trainingAccuracy;
  }

  /** Returns the index of the largest value; ties resolve to the lowest index. */
  private int maxIndex(double[] values) {
    int max = 0;
    for (int i = 1; i < values.length; i++)
      if (values[i] > values[max])
        max = i;
    return max;
  }

  /** Displays the iteration number, padded for alignment, every 10 iterations. */
  private void displayIteration(int i) {
    if (i > 10 && (i % 10) != 0)
      return;
    if (i < 10)
      display(" " + i + ": ");
    else if (i < 100)
      display(" " + i + ": ");
    else
      display(i + ": ");
  }

  // See whether a number is a perfect square. Inefficient, but fine
  // for our purposes.
  private static boolean isPerfectSquare(int n) {
    int root = (int) Math.sqrt(n);
    return root * root == n;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/ml/perceptron/SimplePerceptronSequenceTrainer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.ml.perceptron;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import opennlp.tools.ml.AbstractEventModelSequenceTrainer;
import opennlp.tools.ml.model.AbstractDataIndexer;
import opennlp.tools.ml.model.AbstractModel;
import opennlp.tools.ml.model.DataIndexer;
import opennlp.tools.ml.model.Event;
import opennlp.tools.ml.model.MutableContext;
import opennlp.tools.ml.model.OnePassDataIndexer;
import opennlp.tools.ml.model.Sequence;
import opennlp.tools.ml.model.SequenceStream;
import opennlp.tools.ml.model.SequenceStreamEventStream;
/**
* Trains models for sequences using the perceptron algorithm. Each outcome is represented as
* a binary perceptron classifier. This supports standard (integer) weighting as well
* average weighting. Sequence information is used in a simplified was to that described in:
* Discriminative Training Methods for Hidden Markov Models: Theory and Experiments
* with the Perceptron Algorithm. Michael Collins, EMNLP 2002.
* Specifically only updates are applied to tokens which were incorrectly tagged by a sequence tagger
* rather than to all feature across the sequence which differ from the training sequence.
*/
public class SimplePerceptronSequenceTrainer extends AbstractEventModelSequenceTrainer {
public static final String PERCEPTRON_SEQUENCE_VALUE = "PERCEPTRON_SEQUENCE";
/** Number of training iterations to perform. */
private int iterations;
/** Source of training sequences; reset and re-read on every iteration. */
private SequenceStream sequenceStream;
/** Number of events in the event set. */
private int numEvents;
/** Number of predicates. */
private int numPreds;
/** Number of distinct outcomes. */
private int numOutcomes;
/** List of outcomes for each event i, in context[i]. */
private int[] outcomeList;
/** Human-readable outcome names, indexed by outcome id. */
private String[] outcomeLabels;
/** Stores the average parameter values of each predicate during iteration. */
private MutableContext[] averageParams;
/** Mapping between context and an integer */
private Map<String, Integer> pmap;
/** Mapping between an outcome label and its integer id. */
private Map<String,Integer> omap;
/** Stores the estimated parameter value of each predicate during iteration. */
private MutableContext[] params;
/** Whether averaged parameters are computed and returned. */
private boolean useAverage;
/** Averaging bookkeeping: per predicate/outcome, the last update's
    [VALUE, ITER, EVENT] triple — indexes defined by the constants below. */
private int[][][] updates;
private static final int VALUE = 0;
private static final int ITER = 1;
private static final int EVENT = 2;
/** Predicate names, indexed by predicate id. */
private String[] predLabels;
/** Total number of sequences in the stream. */
private int numSequences;
public SimplePerceptronSequenceTrainer() {
}
@Override
public void validate() {
  super.validate();
  // Only reject an explicitly-set algorithm that is not the sequence
  // perceptron; a null name means "use the default" and is fine.
  String algorithmName = getAlgorithm();
  if (algorithmName != null && !PERCEPTRON_SEQUENCE_VALUE.equals(algorithmName)) {
    throw new IllegalArgumentException("algorithmName must be PERCEPTRON_SEQUENCE");
  }
}
@Deprecated
@Override
public boolean isValid() {
  // Delegate to validate(): valid means it does not throw.
  try {
    validate();
  }
  catch (IllegalArgumentException e) {
    return false;
  }
  return true;
}
/** Reads the averaging option from the parameters and starts training. */
public AbstractModel doTrain(SequenceStream events) throws IOException {
  int iterationCount = getIterations();
  int cutoffValue = getCutoff();
  boolean averaging = trainingParameters.getBooleanParameter("UseAverage", true);
  return trainModel(iterationCount, events, cutoffValue, averaging);
}
// << members related to AbstractSequenceTrainer
/**
 * Indexes the sequence data, initializes the parameter structures, runs the
 * training iterations and returns the resulting model.
 *
 * @param iterations maximum number of passes over the data
 * @param sequenceStream the training sequences
 * @param cutoff minimum predicate frequency, applied during indexing
 * @param useAverage whether to build the model from averaged parameters
 * @throws IOException if the sequence stream cannot be read
 */
public AbstractModel trainModel(int iterations, SequenceStream sequenceStream,
    int cutoff, boolean useAverage) throws IOException {
  this.iterations = iterations;
  this.sequenceStream = sequenceStream;
  trainingParameters.put(AbstractDataIndexer.CUTOFF_PARAM, cutoff);
  trainingParameters.put(AbstractDataIndexer.SORT_PARAM, false);
  DataIndexer di = new OnePassDataIndexer();
  di.init(trainingParameters,reportMap);
  di.index(new SequenceStreamEventStream(sequenceStream));
  // Count the sequences; the averaging bookkeeping needs the total.
  numSequences = 0;
  sequenceStream.reset();
  while (sequenceStream.read() != null) {
    numSequences++;
  }
  outcomeList = di.getOutcomeList();
  predLabels = di.getPredLabels();
  // Map predicate names back to their integer ids.
  pmap = new HashMap<>();
  for (int i = 0; i < predLabels.length; i++) {
    pmap.put(predLabels[i], i);
  }
  display("Incorporating indexed data for training... \n");
  this.useAverage = useAverage;
  numEvents = di.getNumEvents();
  outcomeLabels = di.getOutcomeLabels();
  // Map outcome names back to their integer ids.
  omap = new HashMap<>();
  for (int oli = 0; oli < outcomeLabels.length; oli++) {
    omap.put(outcomeLabels[oli], oli);
  }
  numPreds = predLabels.length;
  numOutcomes = outcomeLabels.length;
  if (useAverage) {
    updates = new int[numPreds][numOutcomes][3];
  }
  display("done.\n");
  display("\tNumber of Event Tokens: " + numEvents + "\n");
  display("\t Number of Outcomes: " + numOutcomes + "\n");
  display("\t Number of Predicates: " + numPreds + "\n");
  params = new MutableContext[numPreds];
  if (useAverage) averageParams = new MutableContext[numPreds];
  int[] allOutcomesPattern = new int[numOutcomes];
  for (int oi = 0; oi < numOutcomes; oi++) {
    allOutcomesPattern[oi] = oi;
  }
  // Every predicate starts with zero weight for every outcome.
  for (int pi = 0; pi < numPreds; pi++) {
    params[pi] = new MutableContext(allOutcomesPattern, new double[numOutcomes]);
    if (useAverage) averageParams[pi] = new MutableContext(allOutcomesPattern,new double[numOutcomes]);
    for (int aoi = 0; aoi < numOutcomes; aoi++) {
      params[pi].setParameter(aoi, 0.0);
      if (useAverage) averageParams[pi].setParameter(aoi, 0.0);
    }
  }
  display("Computing model parameters...\n");
  findParameters(iterations);
  display("...done.\n");
  /* Create and return the model ****/
  if (useAverage) {
    return new PerceptronModel(averageParams, predLabels, outcomeLabels);
  }
  else {
    return new PerceptronModel(params, predLabels, outcomeLabels);
  }
}
/**
 * Runs the requested number of perceptron iterations, then reports the
 * training-set accuracy of whichever parameter set will back the final model.
 */
private void findParameters(int iterations) throws IOException {
  display("Performing " + iterations + " iterations.\n");
  for (int i = 1; i <= iterations; i++) {
    // Pad the iteration number so the progress output stays aligned.
    if (i < 10)
      display(" " + i + ": ");
    else if (i < 100)
      display(" " + i + ": ");
    else
      display(i + ": ");
    nextIteration(i);
  }
  // Report accuracy for the parameters that trainModel will return.
  if (useAverage) {
    trainingStats(averageParams);
  }
  else {
    trainingStats(params);
  }
}
/**
 * Runs a single perceptron training pass over the sequence stream.
 * For each sequence that the current model tags incorrectly, computes the
 * per-outcome feature-count difference (gold minus predicted) and applies it
 * as a parameter update; when averaging is enabled, also maintains the lazily
 * updated averaged parameters. On the final iteration the averaged parameters
 * are normalized by the total number of updates.
 *
 * @param iteration 1-based iteration number (converted to 0-based internally)
 * @throws IOException if reading the sequence stream fails
 */
public void nextIteration(int iteration) throws IOException {
iteration--; //move to 0-based index
// Running totals: correctly tagged events, global event index, sequence index.
int numCorrect = 0;
int oei = 0;
int si = 0;
// One map per outcome: feature -> (gold count - predicted count) for the
// current sequence only; cleared before each update.
List<Map<String,Float>> featureCounts = new ArrayList<>(numOutcomes);
for (int oi = 0; oi < numOutcomes; oi++) {
featureCounts.add(new HashMap<>());
}
// Model snapshot built from the current raw parameters; rebuilt after each
// sequence that triggers an update so later sequences see the new weights.
PerceptronModel model = new PerceptronModel(params,predLabels,outcomeLabels);
sequenceStream.reset();
Sequence sequence;
while ((sequence = sequenceStream.read()) != null) {
// Tag the sequence with the current model and compare against gold events.
Event[] taggerEvents = sequenceStream.updateContext(sequence, model);
Event[] events = sequence.getEvents();
boolean update = false;
for (int ei = 0; ei < events.length; ei++, oei++) {
if (!taggerEvents[ei].getOutcome().equals(events[ei].getOutcome())) {
update = true;
//break;
}
else {
numCorrect++;
}
}
// Only sequences with at least one mistagged event trigger an update.
if (update) {
for (int oi = 0; oi < numOutcomes; oi++) {
featureCounts.get(oi).clear();
}
//System.err.print("train:");for (int ei=0;ei<events.length;ei++)
// {System.err.print(" "+events[ei].getOutcome());} System.err.println();
//training feature count computation
// Accumulate +value for each (gold outcome, feature) pair.
// NOTE(review): this loop increments oei a second time over the same
// events as the comparison loop above, so oei advances twice per updated
// sequence — confirm this is intended before relying on oei elsewhere.
for (int ei = 0; ei < events.length; ei++, oei++) {
String[] contextStrings = events[ei].getContext();
float[] values = events[ei].getValues();
int oi = omap.get(events[ei].getOutcome());
for (int ci = 0; ci < contextStrings.length; ci++) {
// Unvalued events implicitly carry a feature weight of 1.
float value = 1;
if (values != null) {
value = values[ci];
}
Float c = featureCounts.get(oi).get(contextStrings[ci]);
if (c == null) {
c = value;
}
else {
c += value;
}
featureCounts.get(oi).put(contextStrings[ci], c);
}
}
//evaluation feature count computation
//System.err.print("test: ");for (int ei=0;ei<taggerEvents.length;ei++)
// {System.err.print(" "+taggerEvents[ei].getOutcome());} System.err.println();
// Subtract -value for each (predicted outcome, feature) pair; entries that
// cancel to exactly zero are dropped so no no-op updates are applied.
for (Event taggerEvent : taggerEvents) {
String[] contextStrings = taggerEvent.getContext();
float[] values = taggerEvent.getValues();
int oi = omap.get(taggerEvent.getOutcome());
for (int ci = 0; ci < contextStrings.length; ci++) {
float value = 1;
if (values != null) {
value = values[ci];
}
Float c = featureCounts.get(oi).get(contextStrings[ci]);
if (c == null) {
c = -1 * value;
}
else {
c -= value;
}
if (c == 0f) {
featureCounts.get(oi).remove(contextStrings[ci]);
}
else {
featureCounts.get(oi).put(contextStrings[ci], c);
}
}
}
// Apply the accumulated deltas to the raw parameters and lazily bring the
// averaged parameters up to date for each touched (predicate, outcome).
for (int oi = 0; oi < numOutcomes; oi++) {
for (String feature : featureCounts.get(oi).keySet()) {
// pmap appears to map a feature name to its predicate index, with -1
// for unknown features — TODO confirm against pmap's declaration.
int pi = pmap.get(feature);
if (pi != -1) {
//System.err.println(si+" "+outcomeLabels[oi]+" "+feature+" "+featureCounts[oi].get(feature));
params[pi].updateParameter(oi, featureCounts.get(oi).get(feature));
if (useAverage) {
// Lazy averaging: credit the old value for the number of
// (sequence, iteration) steps it was in effect.
if (updates[pi][oi][VALUE] != 0) {
averageParams[pi].updateParameter(oi,updates[pi][oi][VALUE] * (numSequences
* (iteration - updates[pi][oi][ITER]) + (si - updates[pi][oi][EVENT])));
//System.err.println("p avp["+pi+"]."+oi+"="+averageParams[pi].getParameters()[oi]);
}
//System.err.println("p updates["+pi+"]["+oi+"]=("+updates[pi][oi][ITER]+","
// +updates[pi][oi][EVENT]+","+updates[pi][oi][VALUE]+") + ("+iteration+","+oei+","
// +params[pi].getParameters()[oi]+") -> "+averageParams[pi].getParameters()[oi]);
// Record the new value and the point in time it became active.
// NOTE(review): the double parameter is truncated to int here —
// fractional parameter values are lost in the averaging bookkeeping.
updates[pi][oi][VALUE] = (int) params[pi].getParameters()[oi];
updates[pi][oi][ITER] = iteration;
updates[pi][oi][EVENT] = si;
}
}
}
}
// Rebuild the model so subsequent sequences are tagged with updated weights.
model = new PerceptronModel(params,predLabels,outcomeLabels);
}
si++;
}
//finish average computation
// On the last iteration, flush the remaining lazy credit into the averaged
// parameters and divide by the total number of (iteration, sequence) steps.
double totIterations = (double) iterations * si;
if (useAverage && iteration == iterations - 1) {
for (int pi = 0; pi < numPreds; pi++) {
double[] predParams = averageParams[pi].getParameters();
for (int oi = 0; oi < numOutcomes; oi++) {
if (updates[pi][oi][VALUE] != 0) {
predParams[oi] += updates[pi][oi][VALUE] * (numSequences
* (iterations - updates[pi][oi][ITER]) - updates[pi][oi][EVENT]);
}
if (predParams[oi] != 0) {
predParams[oi] /= totIterations;
averageParams[pi].setParameter(oi, predParams[oi]);
//System.err.println("updates["+pi+"]["+oi+"]=("+updates[pi][oi][ITER]+","
// +updates[pi][oi][EVENT]+","+updates[pi][oi][VALUE]+") + ("+iterations+","+0+","
// +params[pi].getParameters()[oi]+") -> "+averageParams[pi].getParameters()[oi]);
}
}
}
}
display(". (" + numCorrect + "/" + numEvents + ") " + ((double) numCorrect / numEvents) + "\n");
}
/**
 * Measures tagging accuracy of the given parameter set over the full
 * training stream and prints the resulting correct/total ratio.
 *
 * @param params parameters to evaluate (raw or averaged)
 * @throws IOException if reading the sequence stream fails
 */
private void trainingStats(MutableContext[] params) throws IOException {
  int correct = 0;
  int outcomeIndex = 0;
  sequenceStream.reset();
  Sequence seq;
  while ((seq = sequenceStream.read()) != null) {
    // Tag each sequence with a model built from the supplied parameters.
    Event[] predicted = sequenceStream.updateContext(seq,
        new PerceptronModel(params, predLabels, outcomeLabels));
    for (Event event : predicted) {
      // Compare the predicted outcome index against the gold outcome list.
      if (omap.get(event.getOutcome()) == outcomeList[outcomeIndex]) {
        correct++;
      }
      outcomeIndex++;
    }
  }
  display(". (" + correct + "/" + numEvents + ") " + ((double) correct / numEvents) + "\n");
}
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/namefind/BilouCodec.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.namefind;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import opennlp.tools.util.SequenceCodec;
import opennlp.tools.util.SequenceValidator;
import opennlp.tools.util.Span;
/**
 * Sequence codec for the BILOU tagging scheme: multi-token names are encoded
 * as {@code start}, {@code cont}..., {@code last}; single-token names as
 * {@code unit}; everything else as {@code other}. Outcomes for typed names
 * carry a {@code <type>-} prefix.
 */
public class BilouCodec implements SequenceCodec<String> {

  public static final String START = "start";
  public static final String CONTINUE = "cont";
  public static final String LAST = "last";
  public static final String UNIT = "unit";
  public static final String OTHER = "other";

  /** Returns the outcome prefix for a span: its type, or "default" when untyped. */
  private static String typePrefix(Span name) {
    return name.getType() == null ? "default" : name.getType();
  }

  /**
   * Converts a BILOU outcome sequence back into name spans.
   *
   * @param c outcome strings, one per token
   * @return the decoded spans, typed via {@link BioCodec#extractNameType(String)}
   */
  @Override
  public Span[] decode(List<String> c) {
    List<Span> spans = new ArrayList<>(c.size());
    int begin = -1;
    int endExclusive = -1;
    for (int idx = 0; idx < c.size(); idx++) {
      String tag = c.get(idx);
      if (tag.endsWith(START)) {
        begin = idx;
        endExclusive = idx + 1;
      } else if (tag.endsWith(CONTINUE)) {
        endExclusive = idx + 1;
      } else if (tag.endsWith(LAST)) {
        // Close the open span; the type is taken from the preceding outcome.
        if (begin != -1) {
          spans.add(new Span(begin, endExclusive + 1, BioCodec.extractNameType(c.get(idx - 1))));
          begin = -1;
          endExclusive = -1;
        }
      } else if (tag.endsWith(UNIT)) {
        // A unit outcome is a complete single-token span on its own.
        spans.add(new Span(idx, idx + 1, BioCodec.extractNameType(c.get(idx))));
      }
    }
    return spans.toArray(new Span[spans.size()]);
  }

  /**
   * Encodes name spans as BILOU outcomes over a token sequence of the
   * given length; uncovered positions become {@link #OTHER}.
   */
  @Override
  public String[] encode(Span[] names, int length) {
    String[] outcomes = new String[length];
    Arrays.fill(outcomes, OTHER);
    for (Span name : names) {
      String prefix = typePrefix(name);
      int first = name.getStart();
      int lastIdx = name.getEnd() - 1;
      if (name.length() > 1) {
        outcomes[first] = prefix + "-" + START;
        // Interior tokens, if any, are continuations.
        for (int i = first + 1; i < lastIdx; i++) {
          outcomes[i] = prefix + "-" + CONTINUE;
        }
        outcomes[lastIdx] = prefix + "-" + LAST;
      } else {
        outcomes[lastIdx] = prefix + "-" + UNIT;
      }
    }
    return outcomes;
  }

  @Override
  public SequenceValidator<String> createSequenceValidator() {
    return new BilouNameFinderSequenceValidator();
  }

  /**
   * B requires CL or L
   * C requires BL
   * L requires B
   * O requires any valid combo/unit
   * U requires none
   *
   * @param outcomes all possible model outcomes
   *
   * @return true, if model outcomes are compatible
   */
  @Override
  public boolean areOutcomesCompatible(String[] outcomes) {
    Set<String> starts = new HashSet<>();
    Set<String> conts = new HashSet<>();
    Set<String> lasts = new HashSet<>();
    Set<String> units = new HashSet<>();
    // Bucket each outcome's type prefix by its BILOU suffix; anything that is
    // neither a known suffix nor the plain OTHER outcome is incompatible.
    for (String outcome : outcomes) {
      if (outcome.endsWith(START)) {
        starts.add(outcome.substring(0, outcome.length() - START.length()));
      } else if (outcome.endsWith(CONTINUE)) {
        conts.add(outcome.substring(0, outcome.length() - CONTINUE.length()));
      } else if (outcome.endsWith(LAST)) {
        lasts.add(outcome.substring(0, outcome.length() - LAST.length()));
      } else if (outcome.endsWith(UNIT)) {
        units.add(outcome.substring(0, outcome.length() - UNIT.length()));
      } else if (!outcome.equals(OTHER)) {
        return false;
      }
    }
    // At least one way to begin a name must exist.
    if (starts.isEmpty() && units.isEmpty()) {
      return false;
    }
    // Every start needs a matching last.
    for (String prefix : starts) {
      if (!lasts.contains(prefix)) {
        return false;
      }
    }
    // Every cont needs a matching start and last.
    for (String prefix : conts) {
      if (!starts.contains(prefix) && !lasts.contains(prefix)) {
        return false;
      }
    }
    // Every last needs a matching start.
    for (String prefix : lasts) {
      if (!starts.contains(prefix)) {
        return false;
      }
    }
    return true;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/namefind/BilouNameFinderSequenceValidator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.namefind;
import opennlp.tools.util.SequenceValidator;
/**
 * Validates BILOU outcome sequences during beam search: a continuation or
 * last outcome must follow a start/continuation of the same name type, and a
 * start/other/unit outcome must not leave a preceding name unfinished.
 */
public class BilouNameFinderSequenceValidator implements
    SequenceValidator<String> {

  public boolean validSequence(int i, String[] inputSequence,
      String[] outcomesSequence, String outcome) {
    boolean continuesName =
        outcome.endsWith(BilouCodec.CONTINUE) || outcome.endsWith(BilouCodec.LAST);
    if (continuesName) {
      int prevIdx = outcomesSequence.length - 1;
      if (prevIdx == -1) {
        // Nothing precedes a continuation/last — invalid.
        return false;
      }
      String prev = outcomesSequence[prevIdx];
      if (prev.endsWith(BilouCodec.OTHER) || prev.endsWith(BilouCodec.UNIT)) {
        // The previous token closed (or never opened) a name.
        return false;
      }
      if (prev.endsWith(BilouCodec.LAST)) {
        // A finished name cannot be continued (continuesName holds here).
        return false;
      }
      if (prev.endsWith(BilouCodec.CONTINUE) || prev.endsWith(BilouCodec.START)) {
        // Continuations must stay within one name type.
        String previousNameType = NameFinderME.extractNameType(prev);
        String nameType = NameFinderME.extractNameType(outcome);
        if (previousNameType != null || nameType != null) {
          return nameType != null && nameType.equals(previousNameType);
        }
        // Both untyped: fall through to the generic check below.
      }
    }
    if (outcomesSequence.length > 0
        && (outcome.endsWith(BilouCodec.START)
            || outcome.endsWith(BilouCodec.OTHER)
            || outcome.endsWith(BilouCodec.UNIT))) {
      String prev = outcomesSequence[outcomesSequence.length - 1];
      // Starting fresh while the previous name is still open is invalid.
      if (prev.endsWith(BilouCodec.START) || prev.endsWith(BilouCodec.CONTINUE)) {
        return false;
      }
    }
    return true;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/namefind/BioCodec.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.namefind;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import opennlp.tools.util.SequenceCodec;
import opennlp.tools.util.Span;
/**
 * Sequence codec for the BIO tagging scheme: names are encoded as a
 * {@code start} outcome followed by zero or more {@code cont} outcomes;
 * all remaining tokens are {@code other}. Typed names carry a
 * {@code <type>-} prefix on each outcome.
 */
public class BioCodec implements SequenceCodec<String> {

  public static final String START = "start";
  public static final String CONTINUE = "cont";
  public static final String OTHER = "other";

  // Matches "<type>-<suffix>"; group 1 captures the type prefix.
  private static final Pattern typedOutcomePattern = Pattern.compile("(.+)-\\w+");

  /** Extracts the type prefix from an outcome, or null when it is untyped. */
  static String extractNameType(String outcome) {
    Matcher matcher = typedOutcomePattern.matcher(outcome);
    return matcher.matches() ? matcher.group(1) : null;
  }

  /**
   * Converts a BIO outcome sequence back into name spans; a span still open
   * at the end of the sequence is flushed before returning.
   */
  public Span[] decode(List<String> c) {
    List<Span> spans = new ArrayList<>(c.size());
    int begin = -1;
    int endExclusive = -1;
    for (int idx = 0; idx < c.size(); idx++) {
      String tag = c.get(idx);
      if (tag.endsWith(START)) {
        // A new start implicitly closes any span that is still open.
        if (begin != -1) {
          spans.add(new Span(begin, endExclusive, extractNameType(c.get(idx - 1))));
        }
        begin = idx;
        endExclusive = idx + 1;
      } else if (tag.endsWith(CONTINUE)) {
        endExclusive = idx + 1;
      } else if (tag.endsWith(OTHER)) {
        if (begin != -1) {
          spans.add(new Span(begin, endExclusive, extractNameType(c.get(idx - 1))));
          begin = -1;
          endExclusive = -1;
        }
      }
    }
    // Flush a span that ran to the end of the sequence.
    if (begin != -1) {
      spans.add(new Span(begin, endExclusive, extractNameType(c.get(c.size() - 1))));
    }
    return spans.toArray(new Span[spans.size()]);
  }

  /**
   * Encodes name spans as BIO outcomes over a token sequence of the given
   * length; uncovered positions become {@link #OTHER}.
   */
  public String[] encode(Span[] names, int length) {
    String[] outcomes = new String[length];
    for (int i = 0; i < length; i++) {
      outcomes[i] = OTHER;
    }
    for (Span name : names) {
      // Untyped spans are emitted under the "default" type prefix.
      String prefix = name.getType() == null ? "default" : name.getType();
      outcomes[name.getStart()] = prefix + "-" + START;
      for (int i = name.getStart() + 1; i < name.getEnd(); i++) {
        outcomes[i] = prefix + "-" + CONTINUE;
      }
    }
    return outcomes;
  }

  public NameFinderSequenceValidator createSequenceValidator() {
    return new NameFinderSequenceValidator();
  }

  @Override
  public boolean areOutcomesCompatible(String[] outcomes) {
    // We should have *optionally* one outcome named "other", some named xyz-start and sometimes
    // they have a pair xyz-cont. We should not have any other outcome
    // To validate the model we check if we have one outcome named "other", at least
    // one outcome with suffix start. After that we check if all outcomes that ends with
    // "cont" have a pair that ends with "start".
    List<String> starts = new ArrayList<>();
    List<String> conts = new ArrayList<>();
    for (String outcome : outcomes) {
      if (outcome.endsWith(START)) {
        starts.add(outcome.substring(0, outcome.length() - START.length()));
      } else if (outcome.endsWith(CONTINUE)) {
        conts.add(outcome.substring(0, outcome.length() - CONTINUE.length()));
      } else if (!outcome.equals(OTHER)) {
        // got unexpected outcome
        return false;
      }
    }
    if (starts.isEmpty()) {
      return false;
    }
    for (String prefix : conts) {
      if (!starts.contains(prefix)) {
        return false;
      }
    }
    return true;
  }
}
|
0
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools
|
java-sources/ai/idylnlp/idylnlp-opennlp-tools-1.8.3/1.1.0/opennlp/tools/namefind/DefaultNameContextGenerator.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.namefind;
import java.util.ArrayList;
import java.util.List;
import opennlp.tools.util.featuregen.AdaptiveFeatureGenerator;
import opennlp.tools.util.featuregen.BigramNameFeatureGenerator;
import opennlp.tools.util.featuregen.CachedFeatureGenerator;
import opennlp.tools.util.featuregen.FeatureGeneratorUtil;
import opennlp.tools.util.featuregen.OutcomePriorFeatureGenerator;
import opennlp.tools.util.featuregen.PreviousMapFeatureGenerator;
import opennlp.tools.util.featuregen.TokenClassFeatureGenerator;
import opennlp.tools.util.featuregen.TokenFeatureGenerator;
import opennlp.tools.util.featuregen.WindowFeatureGenerator;
/**
* Class for determining contextual features for a tag/chunk style
* named-entity recognizer.
*/
public class DefaultNameContextGenerator implements NameContextGenerator {
private AdaptiveFeatureGenerator[] featureGenerators;
@Deprecated
private static AdaptiveFeatureGenerator windowFeatures = new CachedFeatureGenerator(
new WindowFeatureGenerator(new TokenFeatureGenerator(), 2, 2),
new WindowFeatureGenerator(new TokenClassFeatureGenerator(true), 2, 2),
new OutcomePriorFeatureGenerator(),
new PreviousMapFeatureGenerator(),
new BigramNameFeatureGenerator());
/**
* Creates a name context generator.
* @deprecated use the other constructor and always provide the feature generators
*/
@Deprecated
public DefaultNameContextGenerator() {
this((AdaptiveFeatureGenerator[]) null);
}
/**
* Creates a name context generator with the specified cache size.
*/
public DefaultNameContextGenerator(AdaptiveFeatureGenerator... featureGenerators) {
if (featureGenerators != null) {
this.featureGenerators = featureGenerators;
}
else {
// use defaults
this.featureGenerators = new AdaptiveFeatureGenerator[]{
windowFeatures,
new PreviousMapFeatureGenerator()};
}
}
public void addFeatureGenerator(AdaptiveFeatureGenerator generator) {
AdaptiveFeatureGenerator[] generators = featureGenerators;
featureGenerators = new AdaptiveFeatureGenerator[featureGenerators.length + 1];
System.arraycopy(generators, 0, featureGenerators, 0, generators.length);
featureGenerators[featureGenerators.length - 1] = generator;
}
public void updateAdaptiveData(String[] tokens, String[] outcomes) {
if (tokens != null && outcomes != null && tokens.length != outcomes.length) {
throw new IllegalArgumentException(
"The tokens and outcome arrays MUST have the same size!");
}
for (AdaptiveFeatureGenerator featureGenerator : featureGenerators) {
featureGenerator.updateAdaptiveData(tokens, outcomes);
}
}
public void clearAdaptiveData() {
for (AdaptiveFeatureGenerator featureGenerator : featureGenerators) {
featureGenerator.clearAdaptiveData();
}
}
/**
* Return the context for finding names at the specified index.
* @param index The index of the token in the specified toks array for which the
* context should be constructed.
* @param tokens The tokens of the sentence. The <code>toString</code> methods
* of these objects should return the token text.
* @param preds The previous decisions made in the tagging of this sequence.
* Only indices less than i will be examined.
* @param additionalContext Addition features which may be based on a context outside of the sentence.
*
* @return the context for finding names at the specified index.
*/
public String[] getContext(int index, String[] tokens, String[] preds, Object[] additionalContext) {
List<String> features = new ArrayList<>();
for (AdaptiveFeatureGenerator featureGenerator : featureGenerators) {
featureGenerator.createFeatures(features, tokens, index, preds);
}
//previous outcome features
String po = NameFinderME.OTHER;
String ppo = NameFinderME.OTHER;
// TODO: These should be moved out here in its own feature generator!
if (preds != null) {
if (index > 1) {
ppo = preds[index - 2];
}
if (index > 0) {
po = preds[index - 1];
}
features.add("po=" + po);
features.add("pow=" + po + "," + tokens[index]);
features.add("powf=" + po + "," + FeatureGeneratorUtil.tokenFeature(tokens[index]));
features.add("ppo=" + ppo);
}
return features.toArray(new String[features.size()]);
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.