Remove redundant field initializers (#13060)
Make protected fields in final classes private.
sabi0 committed Feb 19, 2024
1 parent 39f6818 commit 9a88d8a
Showing 32 changed files with 48 additions and 48 deletions.
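
For readers skimming the diff, the pattern behind most of the hunks below can be summarized with a small hypothetical class (the names are illustrative, not taken from the patch): a field initializer is redundant when it assigns the type's default value (0, false, null) or when every constructor overwrites the field anyway, and a field that is assigned exactly once in the constructor can additionally be declared final.

// Hypothetical before/after sketch, not code from the Lucene patch.

// Before: both field initializers are redundant.
final class WidgetBefore {
  private int size = 0;            // 0 is already the default value of an int field
  private boolean verbose = true;  // every constructor overwrites this before it is read

  WidgetBefore(boolean verbose) {
    this.verbose = verbose;
  }
}

// After: initializers dropped; the constructor-assigned field also becomes final.
final class WidgetAfter {
  private int size;
  private final boolean verbose;

  WidgetAfter(boolean verbose) {
    this.verbose = verbose;
  }
}

Dropping an initializer such as = DEFAULT_BUCKET_COUNT is only behavior-preserving because the constructor assigns the field unconditionally; declaring the field final lets the compiler enforce that single assignment.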
@@ -59,11 +59,11 @@ public class MinHashFilter extends TokenFilter {

private final List<List<FixedSizeTreeSet<LongPair>>> minHashSets;

- private int hashSetSize = DEFAULT_HASH_SET_SIZE;
+ private final int hashSetSize;

- private int bucketCount = DEFAULT_BUCKET_COUNT;
+ private final int bucketCount;

- private int hashCount = DEFAULT_HASH_COUNT;
+ private final int hashCount;

private boolean requiresInitialisation = true;

@@ -32,13 +32,13 @@ public class MinHashFilterFactory extends TokenFilterFactory {
/** SPI name */
public static final String NAME = "minHash";

- private int hashCount = MinHashFilter.DEFAULT_HASH_COUNT;
+ private final int hashCount;

- private int bucketCount = MinHashFilter.DEFAULT_BUCKET_COUNT;
+ private final int bucketCount;

- private int hashSetSize = MinHashFilter.DEFAULT_HASH_SET_SIZE;
+ private final int hashSetSize;

- private boolean withRotation;
+ private final boolean withRotation;

/** Create a {@link MinHashFilterFactory}. */
public MinHashFilterFactory(Map<String, String> args) {
@@ -67,7 +67,7 @@ public class WordDelimiterGraphFilterFactory extends TokenFilterFactory
private final int flags;
byte[] typeTable = null;
private CharArraySet protectedWords = null;
- private boolean adjustOffsets = false;
+ private final boolean adjustOffsets;

/** Creates a new WordDelimiterGraphFilterFactory */
public WordDelimiterGraphFilterFactory(Map<String, String> args) {
@@ -89,7 +89,7 @@ private static class DefaultSetHolder {
private final CharArraySet stoptable;

/** Contains words that should be indexed but not stemmed. */
- private CharArraySet excltable = CharArraySet.EMPTY_SET;
+ private final CharArraySet excltable;

private final StemmerOverrideMap stemdict;

@@ -41,8 +41,8 @@ public class PatternCaptureGroupFilterFactory extends TokenFilterFactory {
/** SPI name */
public static final String NAME = "patternCaptureGroup";

- private Pattern pattern;
- private boolean preserveOriginal = true;
+ private final Pattern pattern;
+ private final boolean preserveOriginal;

public PatternCaptureGroupFilterFactory(Map<String, String> args) {
super(args);
@@ -114,7 +114,7 @@ public class JapaneseTokenizerFactory extends TokenizerFactory implements Resour
* /箱根山-箱根/成田空港-成田/ requests "箱根" and "成田" to be in the result in NBEST output.
*/
private final String nbestExamples;
- private int nbestCost = -1;
+ private int nbestCost;

/** Creates a new JapaneseTokenizerFactory */
public JapaneseTokenizerFactory(Map<String, String> args) {
@@ -44,7 +44,7 @@ public final class OpenNLPTokenizer extends SegmentingTokenizerBase {
private int sentenceStart = 0;
private int sentenceIndex = -1;

- private NLPTokenizerOp tokenizerOp = null;
+ private final NLPTokenizerOp tokenizerOp;

public OpenNLPTokenizer(
AttributeFactory factory, NLPSentenceDetectorOp sentenceOp, NLPTokenizerOp tokenizerOp)
@@ -23,7 +23,7 @@

/** Supply OpenNLP Chunking tool Requires binary models from OpenNLP project on SourceForge. */
public class NLPChunkerOp {
- private ChunkerME chunker = null;
+ private final ChunkerME chunker;

public NLPChunkerOp(ChunkerModel chunkerModel) throws IOException {
chunker = new ChunkerME(chunkerModel);
@@ -27,7 +27,7 @@
* SourceForge.
*/
public class NLPPOSTaggerOp {
- private POSTagger tagger = null;
+ private final POSTagger tagger;

public NLPPOSTaggerOp(POSModel model) throws IOException {
tagger = new POSTaggerME(model);
@@ -32,10 +32,10 @@
*/
public final class DaitchMokotoffSoundexFilter extends TokenFilter {
/** true if encoded tokens should be added as synonyms */
- protected boolean inject = true;
+ private final boolean inject;

/** phonetic encoder */
- protected DaitchMokotoffSoundex encoder = new DaitchMokotoffSoundex();
+ private final DaitchMokotoffSoundex encoder = new DaitchMokotoffSoundex();

// output is a string such as ab|ac|...
private static final Pattern pattern = Pattern.compile("([^|]+)");
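
This hunk, like the PhoneticFilter one that follows, also shows the second half of the change described in the commit message: a final class can have no subclasses, so protected grants nothing beyond package access, and a field used only inside the class can simply be private. A minimal hypothetical sketch (the class name is illustrative, not from the patch):

// Hypothetical sketch, not code from the Lucene patch.
public final class SoundexLikeFilter {   // final: no subclass can ever exist
  // was: protected boolean inject = true;
  private final boolean inject;          // private + final: set once, used only here

  public SoundexLikeFilter(boolean inject) {
    this.inject = inject;
  }

  boolean injectsSynonyms() {
    return inject;
  }
}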
@@ -32,13 +32,13 @@
*/
public final class PhoneticFilter extends TokenFilter {
/** true if encoded tokens should be added as synonyms */
- protected boolean inject = true;
+ private final boolean inject;

/** phonetic encoder */
- protected Encoder encoder = null;
+ private final Encoder encoder;

/** captured state, non-null when <code>inject=true</code> and a token is buffered */
- protected State save = null;
+ private State save = null;

private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
private final PositionIncrementAttribute posAtt = addAttribute(PositionIncrementAttribute.class);
@@ -73,7 +73,7 @@ public class Trie {
List<CharSequence> cmds = new ArrayList<>();
int root;

- boolean forward = false;
+ boolean forward;

/**
* Constructor for the Trie object.
@@ -43,7 +43,7 @@ public class TaskSequence extends PerfTask {
private boolean resetExhausted = false;
private PerfTask[] tasksArray;
private boolean anyExhaustibleTasks;
- private boolean collapsable = false; // to not collapse external sequence named in alg.
+ private final boolean collapsable; // to not collapse external sequence named in alg.

private boolean fixedTime; // true if we run for fixed time
private double runTimeSec; // how long to run for
@@ -36,7 +36,7 @@ public class TestTrecContentSource extends LuceneTestCase {
/** A TrecDocMaker which works on a String and not files. */
private static class StringableTrecSource extends TrecContentSource {

- private String docs = null;
+ private final String docs;

public StringableTrecSource(String docs, boolean forever) {
this.docs = docs;
@@ -32,7 +32,7 @@ public final class FieldInfo {
/** Internal field number */
public final int number;

- private DocValuesType docValuesType = DocValuesType.NONE;
+ private DocValuesType docValuesType;

// True if any document indexed term vectors
private boolean storeTermVector;
@@ -84,7 +84,7 @@ public class LiveIndexWriterConfig {
protected volatile int perThreadHardLimitMB;

/** True if segment flushes should use compound file format */
- protected volatile boolean useCompoundFile = IndexWriterConfig.DEFAULT_USE_COMPOUND_FILE_SYSTEM;
+ protected volatile boolean useCompoundFile;

/** True if calls to {@link IndexWriter#close()} should first do a commit. */
protected boolean commitOnClose = IndexWriterConfig.DEFAULT_COMMIT_ON_CLOSE;
4 changes: 2 additions & 2 deletions lucene/core/src/java/org/apache/lucene/index/MergePolicy.java
@@ -597,12 +597,12 @@ public MergeAbortedException(String message) {
* If the size of the merge segment exceeds this ratio of the total index size then it will remain
* in non-compound format
*/
- protected double noCFSRatio = DEFAULT_NO_CFS_RATIO;
+ protected double noCFSRatio;

/**
* If the size of the merged segment exceeds this value then it will not use compound file format.
*/
- protected long maxCFSSegmentSize = DEFAULT_MAX_CFS_SEGMENT_SIZE;
+ protected long maxCFSSegmentSize;

/** Creates a new merge policy instance. */
protected MergePolicy() {
@@ -120,7 +120,7 @@ private static long scaleMinScore(float minScore, int scalingFactor) {

private final int scalingFactor;
// scaled min competitive score
- private long minCompetitiveScore = 0;
+ private long minCompetitiveScore;

private final Scorer[] allScorers;

@@ -89,7 +89,7 @@ public class TermOrdValComparator extends FieldComparator<BytesRef> {
private boolean singleSort;

/** Whether this comparator is allowed to skip documents. */
- private boolean canSkipDocuments = true;
+ private boolean canSkipDocuments;

/** Whether the collector is done with counting hits so that we can start skipping documents. */
private boolean hitsThresholdReached = false;
@@ -253,7 +253,7 @@ public DocIdSetIterator iterator() throws IOException {
private class Iterator extends DocIdSetIterator {

int block;
- DocIdSetIterator sub = null;
+ DocIdSetIterator sub;
int doc;

Iterator() throws IOException {
@@ -31,7 +31,7 @@
*/
public class LimitedFiniteStringsIterator extends FiniteStringsIterator {
/** Maximum number of finite strings to create. */
- private int limit = Integer.MAX_VALUE;
+ private final int limit;

/** Number of generated finite strings. */
private int count = 0;
@@ -123,7 +123,7 @@ public static class OfPostings extends OffsetsEnum {
private final PostingsEnum postingsEnum; // with offsets
private final int freq;

- private int posCounter = -1;
+ private int posCounter;

public OfPostings(BytesRef term, int freq, PostingsEnum postingsEnum) throws IOException {
this.term = Objects.requireNonNull(term);
@@ -30,7 +30,7 @@
*/
public class BoostQueryNode extends QueryNodeImpl {

- private float value = 0;
+ private float value;

/**
* Constructs a boost node
@@ -84,7 +84,7 @@ public String toLargeString() {
}
}

- private Modifier modifier = Modifier.MOD_NONE;
+ private Modifier modifier;

/**
* Used to store the modifier value on the original query string
@@ -25,9 +25,9 @@
*/
public class OpaqueQueryNode extends QueryNodeImpl {

- private CharSequence schema = null;
+ private CharSequence schema;

- private CharSequence value = null;
+ private CharSequence value;

/**
* @param schema - schema identifier
@@ -41,7 +41,7 @@ public class PathQueryNode extends QueryNodeImpl {

/** Term text with a beginning and end position */
public static class QueryText implements Cloneable {
- CharSequence value = null;
+ CharSequence value;

/** != null The term's begin position. */
int begin;
@@ -97,7 +97,7 @@ public String toString() {
}
}

- private List<QueryText> values = null;
+ private List<QueryText> values;

/**
* @param pathElements - List of QueryText objects
@@ -25,7 +25,7 @@
/** Query node for {@link PhraseQuery}'s slop factor. */
public class PhraseSlopQueryNode extends QueryNodeImpl implements FieldableNode {

- private int value = 0;
+ private int value;

/**
* @exception QueryNodeError throw in overridden method to disallow
@@ -57,9 +57,9 @@ CharSequence toQueryString() {

/** utility class containing the distance condition and number */
public static class ProximityType {
- int pDistance = 0;
+ int pDistance;

- Type pType = null;
+ Type pType;

public ProximityType(Type type) {
this(type, 0);
@@ -71,10 +71,10 @@ public ProximityType(Type type, int distance) {
}
}

- private Type proximityType = Type.SENTENCE;
+ private Type proximityType;
private int distance = -1;
- private boolean inorder = false;
- private CharSequence field = null;
+ private final boolean inorder;
+ private CharSequence field;

/**
* @param clauses - QueryNode children
@@ -32,7 +32,7 @@
*/
public class SlopQueryNode extends QueryNodeImpl implements FieldableNode {

- private int value = 0;
+ private int value;

/**
* @param query - QueryNode Tree with the phrase
@@ -34,7 +34,7 @@
*/
public class FieldBoostMapFCListener implements FieldConfigListener {

- private QueryConfigHandler config = null;
+ private final QueryConfigHandler config;

public FieldBoostMapFCListener(QueryConfigHandler config) {
this.config = config;
@@ -36,7 +36,7 @@
*/
public class FieldDateResolutionFCListener implements FieldConfigListener {

- private QueryConfigHandler config = null;
+ private final QueryConfigHandler config;

public FieldDateResolutionFCListener(QueryConfigHandler config) {
this.config = config;
@@ -175,7 +175,7 @@ public class DocHelper {
public static Field textUtfField2 =
new Field(TEXT_FIELD_UTF2_KEY, FIELD_UTF2_TEXT, TEXT_TYPE_STORED_WITH_TVS);

- public static Map<String, Object> nameValues = null;
+ public static Map<String, Object> nameValues;

// ordered list of all the fields...
// could use LinkedHashMap for this purpose if Java1.4 is OK