Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

OPENNLP-948: Use Objects.requireNonNull when possible #76

Merged
merged 1 commit on Jan 19, 2017
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
Expand Up @@ -21,6 +21,7 @@
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.Objects;

import opennlp.tools.util.InvalidFormatException;

Expand All @@ -36,11 +37,7 @@ public abstract class ModelLoader<T> {
private final String modelName;

/**
 * Initializes the loader with the name of the model it will load.
 *
 * @param modelName the model name used in messages; must not be {@code null}
 * @throws NullPointerException if {@code modelName} is {@code null}
 */
protected ModelLoader(String modelName) {
  this.modelName = Objects.requireNonNull(modelName, "modelName must not be null!");
}

protected abstract T loadModel(InputStream modelIn) throws IOException;
Expand Down
Expand Up @@ -15,12 +15,12 @@
* limitations under the License.
*/


package opennlp.tools.dictionary.serializer;

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Objects;

/**
* The {@link Attributes} class stores name value pairs.
Expand Down Expand Up @@ -50,13 +50,8 @@ public String getValue(String key) {
* @param value
*/
/**
 * Associates the given value with the given key, replacing any previous mapping.
 *
 * @param key the attribute name; must not be {@code null}
 * @param value the attribute value; must not be {@code null}
 * @throws NullPointerException if {@code key} or {@code value} is {@code null}
 */
public void setValue(String key, String value) {
  Objects.requireNonNull(key, "key must not be null");
  Objects.requireNonNull(value, "value must not be null");

  mNameValueMap.put(key, value);
}
Expand Down
Expand Up @@ -44,14 +44,9 @@ public DocumentSample(String category, String text[]) {
}

public DocumentSample(String category, String text[], Map<String, Object> extraInformation) {
if (category == null) {
throw new IllegalArgumentException("category must not be null");
}
if (text == null) {
throw new IllegalArgumentException("text must not be null");
}
Objects.requireNonNull(text, "text must not be null");

this.category = category;
this.category = Objects.requireNonNull(category, "category must not be null");
this.text = Collections.unmodifiableList(new ArrayList<>(Arrays.asList(text)));

if (extraInformation == null) {
Expand Down
Expand Up @@ -17,6 +17,7 @@
package opennlp.tools.entitylinker;

import java.io.IOException;
import java.util.Objects;

import opennlp.tools.util.ext.ExtensionLoader;

Expand Down Expand Up @@ -70,9 +71,7 @@ public static synchronized EntityLinker<?> getLinker(String entityType, EntityLi
* @throws java.io.IOException
*/
public static synchronized EntityLinker<?> getLinker(EntityLinkerProperties properties) throws IOException {
if (properties == null) {
throw new IllegalArgumentException("Null argument in entityLinkerFactory");
}
Objects.requireNonNull(properties, "properties argument must not be null");

String linkerImplFullName = properties.getProperty("linker", "");

Expand Down
Expand Up @@ -20,6 +20,7 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

import opennlp.tools.sentdetect.SentenceSample;
import opennlp.tools.tokenize.Detokenizer;
Expand All @@ -37,18 +38,18 @@ public abstract class AbstractToSentenceSampleStream<T> extends
ObjectStream<T> samples, int chunkSize) {
super(samples);

if (detokenizer == null)
throw new IllegalArgumentException("detokenizer must not be null!");
this.detokenizer = Objects.requireNonNull(detokenizer, "detokenizer must not be null");

this.detokenizer = detokenizer;

if (chunkSize < 0)
if (chunkSize < 0) {
throw new IllegalArgumentException("chunkSize must be zero or larger but was " + chunkSize + "!");
}

if (chunkSize > 0)
if (chunkSize > 0) {
this.chunkSize = chunkSize;
else
}
else {
this.chunkSize = Integer.MAX_VALUE;
}
}

protected abstract String[] toSentence(T sample);
Expand All @@ -63,13 +64,14 @@ public SentenceSample read() throws IOException {
chunks++;
}

if (sentences.size() > 0)
if (sentences.size() > 0) {
return new SentenceSample(detokenizer,
sentences.toArray(new String[sentences.size()][]));
else if (posSample != null)
}
else if (posSample != null) {
return read(); // filter out empty line
else {
return null; // last sample was read
}

return null; // last sample was read
}
}
Expand Up @@ -18,6 +18,7 @@
package opennlp.tools.formats.convert;

import java.io.IOException;
import java.util.Objects;

import opennlp.tools.postag.POSSample;
import opennlp.tools.tokenize.Detokenizer;
Expand All @@ -33,13 +34,9 @@ public class POSToTokenSampleStream extends FilterObjectStream<POSSample, TokenS
private final Detokenizer detokenizer;

/**
 * Creates a stream that converts {@link POSSample}s into {@link TokenSample}s.
 *
 * @param detokenizer used to reconstruct text from tokens; must not be {@code null}
 * @param samples the underlying POS sample stream, passed to the filter superclass
 * @throws NullPointerException if {@code detokenizer} is {@code null}
 */
public POSToTokenSampleStream(Detokenizer detokenizer, ObjectStream<POSSample> samples) {
  super(samples);
  this.detokenizer = Objects.requireNonNull(detokenizer, "detokenizer must not be null!");
}

public TokenSample read() throws IOException {
Expand Down
Expand Up @@ -15,11 +15,11 @@
* limitations under the License.
*/


package opennlp.tools.namefind;

import java.util.LinkedList;
import java.util.List;
import java.util.Objects;

import opennlp.tools.dictionary.Dictionary;
import opennlp.tools.util.Span;
Expand All @@ -44,13 +44,8 @@ public class DictionaryNameFinder implements TokenNameFinder {
* @param type the name type used for the produced spans
*/
/**
 * Creates a name finder that marks dictionary entries with the given span type.
 *
 * @param dictionary the dictionary to look names up in; must not be {@code null}
 * @param type the name type used for the produced spans; must not be {@code null}
 * @throws NullPointerException if {@code dictionary} or {@code type} is {@code null}
 */
public DictionaryNameFinder(Dictionary dictionary, String type) {
  mDictionary = Objects.requireNonNull(dictionary, "dictionary must not be null");
  this.type = Objects.requireNonNull(type, "type must not be null");
}

/**
Expand Down
Expand Up @@ -45,12 +45,9 @@ public class NameSample {

public NameSample(String id, String[] sentence, Span[] names,
String[][] additionalContext, boolean clearAdaptiveData) {

this.id = id;

if (sentence == null) {
throw new IllegalArgumentException("sentence must not be null!");
}
Objects.requireNonNull(sentence, "sentence must not be null");

if (names == null) {
names = new Span[0];
Expand Down
Expand Up @@ -21,6 +21,7 @@
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
import java.util.Objects;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

Expand All @@ -36,11 +37,7 @@ public final class RegexNameFinder implements TokenNameFinder {
private Map<String, Pattern[]> regexMap;

/**
 * Creates a finder backed by the given mapping from span type to patterns.
 *
 * @param regexMap maps a span type name to the patterns matched for it; must not be {@code null}
 * @throws NullPointerException if {@code regexMap} is {@code null}
 */
public RegexNameFinder(Map<String, Pattern[]> regexMap) {
  this.regexMap = Objects.requireNonNull(regexMap, "regexMap must not be null");
}

public RegexNameFinder(Pattern patterns[], String type) {
Expand Down Expand Up @@ -196,4 +193,4 @@ public String getsType() {
// Setter for the sType field; presumably the span type label this finder
// attaches to produced spans (paired with getsType above) — confirm in full source.
public void setsType(String sType) {
this.sType = sType;
}
}
}
Expand Up @@ -18,6 +18,7 @@

import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.regex.Pattern;

/**
Expand All @@ -39,9 +40,8 @@ public class RegexNameFinderFactory {
*/
public static synchronized RegexNameFinder getDefaultRegexNameFinders(
Map<String, Pattern[]> config, DEFAULT_REGEX_NAME_FINDER... defaults) {
if (config == null) {
throw new IllegalArgumentException("config Map cannot be null");
}
Objects.requireNonNull(config, "config must not be null");

Map<String, Pattern[]> defaultsToMap = new HashMap<>();
if (defaults != null) {
defaultsToMap = defaultsToMap(defaults);
Expand All @@ -58,9 +58,7 @@ public static synchronized RegexNameFinder getDefaultRegexNameFinders(
*/
/**
 * Creates a {@link RegexNameFinder} from the given built-in regex definitions only.
 *
 * @param defaults the built-in finder definitions to enable; must not be {@code null}
 * @return a finder backed by the patterns of the given defaults
 * @throws NullPointerException if {@code defaults} is {@code null}
 */
public static synchronized RegexNameFinder getDefaultRegexNameFinders(
    DEFAULT_REGEX_NAME_FINDER... defaults) {
  Objects.requireNonNull(defaults, "defaults must not be null");
  return new RegexNameFinder(defaultsToMap(defaults));
}

Expand Down
Expand Up @@ -15,7 +15,6 @@
* limitations under the License.
*/


package opennlp.tools.parser;

import java.io.BufferedReader;
Expand All @@ -27,6 +26,7 @@
import java.io.OutputStreamWriter;
import java.net.URL;
import java.util.Map;
import java.util.Objects;

import opennlp.tools.chunker.ChunkerModel;
import opennlp.tools.ml.BeamSearch;
Expand Down Expand Up @@ -141,9 +141,7 @@ public ParserModel(String languageCode, MaxentModel buildModel, MaxentModel chec
throw new IllegalArgumentException("attachModel must be null for chunking parser!");
}
else if (ParserType.TREEINSERT.equals(modelType)) {
if (attachModel == null)
throw new IllegalArgumentException("attachModel must not be null!");

Objects.requireNonNull(attachModel, "attachModel must not be null");
artifactMap.put(ATTACH_MODEL_ENTRY_NAME, attachModel);
}
else {
Expand Down Expand Up @@ -301,4 +299,4 @@ else if (ParserType.TREEINSERT.equals(modelType)) {
throw new InvalidFormatException("Missing the head rules!");
}
}
}
}
Expand Up @@ -67,8 +67,7 @@ public HeadRule(boolean l2r, String[] tags) {
leftToRight = l2r;

for (String tag : tags) {
if (tag == null)
throw new IllegalArgumentException("tags must not contain null values!");
Objects.requireNonNull(tag, "tags must not contain null values");
}

this.tags = tags;
Expand Down
Expand Up @@ -80,8 +80,7 @@ public HeadRule(boolean l2r, String[] tags) {
leftToRight = l2r;

for (String tag : tags) {
if (tag == null)
throw new IllegalArgumentException("tags must not contain null values!");
Objects.requireNonNull(tag, "tags must not contain null values!");
}

this.tags = tags;
Expand Down
Expand Up @@ -46,14 +46,9 @@ public class TokenSample {
* @param tokenSpans the spans which mark the begin and end of the tokens.
*/
public TokenSample(String text, Span tokenSpans[]) {
Objects.requireNonNull(tokenSpans, "tokenSpans must not be null");

if (text == null)
throw new IllegalArgumentException("text must not be null!");

if (tokenSpans == null)
throw new IllegalArgumentException("tokenSpans must not be null! ");

this.text = text;
this.text = Objects.requireNonNull(text, "text must not be null");
this.tokenSpans = Collections.unmodifiableList(new ArrayList<>(Arrays.asList(tokenSpans)));

for (Span tokenSpan : tokenSpans) {
Expand Down Expand Up @@ -161,13 +156,8 @@ private static void addToken(StringBuilder sample, List<Span> tokenSpans,
}

public static TokenSample parse(String sampleString, String separatorChars) {

if (sampleString == null) {
throw new IllegalArgumentException("sampleString must not be null!");
}
if (separatorChars == null) {
throw new IllegalArgumentException("separatorChars must not be null!");
}
Objects.requireNonNull(sampleString, "sampleString must not be null");
Objects.requireNonNull(separatorChars, "separatorChars must not be null");

Span whitespaceTokenSpans[] = WhitespaceTokenizer.INSTANCE.tokenizePos(sampleString);

Expand Down
Expand Up @@ -18,6 +18,7 @@
package opennlp.tools.tokenize;

import java.io.IOException;
import java.util.Objects;

import opennlp.tools.util.FilterObjectStream;
import opennlp.tools.util.ObjectStream;
Expand All @@ -39,19 +40,9 @@ public class TokenSampleStream extends FilterObjectStream<String, TokenSample> {

private final String separatorChars;


/**
 * Creates a stream that parses token samples from the given string stream.
 *
 * @param sampleStrings the underlying sample strings, passed to the filter superclass;
 *                      must not be {@code null}
 * @param separatorChars the separator marking token boundaries; must not be {@code null}
 * @throws NullPointerException if {@code sampleStrings} or {@code separatorChars} is {@code null}
 */
public TokenSampleStream(ObjectStream<String> sampleStrings, String separatorChars) {
  super(Objects.requireNonNull(sampleStrings, "sampleStrings must not be null"));
  this.separatorChars = Objects.requireNonNull(separatorChars, "separatorChars must not be null");
}

public TokenSampleStream(ObjectStream<String> sentences) {
Expand Down