SOLR-16405: Remove unneeded errorprone suppressions (#1001)

risdenk committed Sep 9, 2022
1 parent 271715c commit e0f9d53
Showing 39 changed files with 102 additions and 144 deletions.
@@ -442,7 +442,6 @@ private final void doReplicateOnlyRecovery(SolrCore core) throws InterruptedException
/**
* @return true if we have reached max attempts or should stop recovering for some other reason
*/
@SuppressWarnings("NarrowCalculation")
private boolean waitBetweenRecoveries(String coreName) {
// lets pause for a moment and we need to try again...
// TODO: we don't want to retry for some problems?
@@ -473,7 +472,7 @@ private boolean waitBetweenRecoveries(String coreName) {
// Wait an exponential interval between retries, start at 4 seconds and work up to a minute.
// Meanwhile we will check in 2s sub-intervals to see if we've been closed
// Maximum loop count is 30 because we never want to wait longer than a minute (2s * 30 = 1m)
- int loopCount = retries < 5 ? (int) Math.pow(2, retries) : 30;
+ long loopCount = retries < 5 ? Math.round(Math.pow(2, retries)) : 30;
if (log.isInfoEnabled()) {
log.info(
"Wait [{}] seconds before trying to recover again (attempt={})",
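The NarrowCalculation warning fired on the (int) cast of Math.pow's double result; Math.round returns a long, so the cast disappears and truncation becomes rounding. A minimal standalone sketch of the two forms (demo class, not Solr code):

public class NarrowingDemo {
  public static void main(String[] args) {
    int retries = 3;
    // The cast truncates the double result toward zero; errorprone's
    // NarrowCalculation flags this kind of lossy narrowing.
    int truncated = (int) Math.pow(2, retries);
    // Math.round(double) returns a long: no narrowing cast, and the
    // result is rounded instead of truncated.
    long rounded = Math.round(Math.pow(2, retries));
    System.out.println(truncated + " " + rounded); // 8 8
  }
}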
3 changes: 1 addition & 2 deletions solr/core/src/java/org/apache/solr/cloud/ZkController.java
@@ -1062,7 +1062,6 @@ private void init() {
}
}

@SuppressWarnings("NarrowCalculation")
private void checkForExistingEphemeralNode() throws KeeperException, InterruptedException {
if (zkRunOnly) {
return;
@@ -1094,7 +1093,7 @@ private void checkForExistingEphemeralNode() throws KeeperException, InterruptedException
}

boolean deleted =
- deletedLatch.await(zkClient.getZooKeeper().getSessionTimeout() * 2, TimeUnit.MILLISECONDS);
+ deletedLatch.await(zkClient.getZooKeeper().getSessionTimeout() * 2L, TimeUnit.MILLISECONDS);
if (!deleted) {
throw new SolrException(
ErrorCode.SERVER_ERROR,
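Here the warning caught a real hazard: getSessionTimeout() returns an int, so `* 2` was evaluated in 32-bit arithmetic before widening to the long parameter of await. The 2L literal promotes the multiplication itself to long. A standalone sketch of the overflow (hypothetical values, not Solr code):

public class WideningDemo {
  public static void main(String[] args) {
    int timeoutMs = 1_500_000_000; // large but valid int
    // The multiply happens in int and wraps before the widening to long.
    long overflowed = timeoutMs * 2;  // -1294967296
    // A long literal promotes the whole multiplication to 64-bit.
    long correct = timeoutMs * 2L;    // 3000000000
    System.out.println(overflowed + " " + correct);
  }
}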
@@ -1025,7 +1025,6 @@ public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
// TODO Should a failure retrieving any piece of info mark the overall request as a failure? Is
// there a core set of values that are required to make a response here useful?
/** Used for showing statistics and progress information. */
@SuppressWarnings("NarrowCalculation")
private NamedList<Object> getReplicationDetails(
SolrQueryResponse rsp, boolean showFollowerDetails) {
NamedList<Object> details = new SimpleOrderedMap<>();
@@ -1126,7 +1125,7 @@ private NamedList<Object> getReplicationDetails(
currFileSizeDownloaded = (Long) currentFile.get("bytesDownloaded");
bytesDownloaded += currFileSizeDownloaded;
if (currFileSize > 0)
- percentDownloaded = (currFileSizeDownloaded * 100) / currFileSize;
+ percentDownloaded = (float) (currFileSizeDownloaded * 100) / currFileSize;
}
}
follower.add("filesDownloaded", filesDownloaded);
@@ -1146,7 +1145,7 @@ private NamedList<Object> getReplicationDetails(
((bytesToDownload - bytesDownloaded) * elapsed) / bytesDownloaded;
float totalPercent = 0;
long downloadSpeed = 0;
- if (bytesToDownload > 0) totalPercent = (bytesDownloaded * 100) / bytesToDownload;
+ if (bytesToDownload > 0) totalPercent = (float) (bytesDownloaded * 100) / bytesToDownload;
if (elapsed > 0) downloadSpeed = (bytesDownloaded / elapsed);
if (currFile != null) follower.add("currentFile", currFile);
follower.add("currentFileSize", NumberUtils.readableSize(currFileSize));
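Both percent calculations divided one long by another, so the float result could only ever hold whole numbers; casting the dividend to float first makes the division floating-point. A standalone sketch with demo values:

public class PercentDemo {
  public static void main(String[] args) {
    long downloaded = 333, total = 1000;
    // Long division runs first and drops the fraction: 33.0.
    float truncated = (downloaded * 100) / total;
    // Casting the dividend makes the division itself floating-point: 33.3.
    float exact = (float) (downloaded * 100) / total;
    System.out.println(truncated + " " + exact); // 33.0 33.3
  }
}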
@@ -355,7 +355,6 @@ private void estimateNorms(Map<String, Object> result) throws IOException {
result.put(NORMS, stats);
}

@SuppressWarnings("LongDoubleConversion")
private void estimatePoints(Map<String, Object> result) throws IOException {
log.info("- estimating points...");
Map<String, Map<String, Object>> stats = new HashMap<>();
@@ -372,7 +371,8 @@ private void estimatePoints(Map<String, Object> result) throws IOException {
(SummaryStatistics)
perField.computeIfAbsent("lengths", s -> new MapWriterSummaryStatistics());
lengthSummary.addValue(
- values.size() * values.getBytesPerDimension() * values.getNumIndexDimensions());
+ (double)
+     (values.size() * values.getBytesPerDimension() * values.getNumIndexDimensions()));
}
}
result.put(POINTS, stats);
@@ -513,7 +513,6 @@ private void estimateTerms(Map<String, Object> result) throws IOException {
result.put(TERMS, stats);
}

@SuppressWarnings("LongDoubleConversion")
private void estimateTermStats(
String field, Terms terms, Map<String, Map<String, Object>> stats, boolean isSampling)
throws IOException {
@@ -550,12 +549,12 @@ private void estimateTermStats(
for (int i = 0; i < samplingStep; i++) {
lengthSummary.addValue(term.length);
docFreqSummary.addValue(termsEnum.docFreq());
- totalFreqSummary.addValue(termsEnum.totalTermFreq());
+ totalFreqSummary.addValue((double) termsEnum.totalTermFreq());
}
} else {
lengthSummary.addValue(term.length);
docFreqSummary.addValue(termsEnum.docFreq());
- totalFreqSummary.addValue(termsEnum.totalTermFreq());
+ totalFreqSummary.addValue((double) termsEnum.totalTermFreq());
}
if (terms.hasPayloads()) {
postings = termsEnum.postings(postings, PostingsEnum.ALL);
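SummaryStatistics.addValue takes a double, so these longs were converted implicitly; errorprone's LongDoubleConversion flags that because a double cannot represent every long above 2^53. The explicit (double) casts keep the same behavior but make the conversion deliberate. A standalone sketch of the precision edge:

public class LongDoubleDemo {
  public static void main(String[] args) {
    long big = (1L << 53) + 1;       // 9007199254740993: has no exact double representation
    double converted = (double) big; // silently rounds to 9007199254740992
    System.out.printf("%d -> %.0f%n", big, converted);
  }
}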
@@ -949,11 +949,8 @@ enum QType {
RANGE
}

@SuppressWarnings("StaticAssignmentOfThrowable")
static final RuntimeException unknownField = new RuntimeException("UnknownField");

static {
unknownField.fillInStackTrace();
static RuntimeException unknownField() {
return new RuntimeException("UnknownField");
}

/**
@@ -1193,7 +1190,7 @@ protected Query getAliasedQuery() throws SyntaxError {
if (exceptions) {
FieldType ft = schema.getFieldTypeNoEx(field);
if (ft == null && null == MagicFieldName.get(field)) {
- throw unknownField;
+ throw unknownField();
}
}

@@ -1268,7 +1265,7 @@ && allSameQueryStructure(lst)) {
if (exceptions) {
FieldType ft = schema.getFieldTypeNoEx(field);
if (ft == null && null == MagicFieldName.get(field)) {
- throw unknownField;
+ throw unknownField();
}
}
return getQuery();
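The StaticAssignmentOfThrowable warning exists because a Throwable held in a static field fills in its stack trace once, at class initialization, so every later throw reports a trace unrelated to the failing call site. The factory method creates a fresh exception with an accurate trace each time. A standalone sketch of the difference (demo class, not Solr code):

public class StaticThrowableDemo {
  // Anti-pattern: the stack trace is captured here, at class initialization,
  // so every throw site reports the same misleading trace.
  static final RuntimeException SHARED = new RuntimeException("UnknownField");

  // The fix: build a fresh exception per call site.
  static RuntimeException unknownField() {
    return new RuntimeException("UnknownField");
  }

  public static void main(String[] args) {
    try {
      throw unknownField();
    } catch (RuntimeException e) {
      e.printStackTrace(); // top frame is unknownField()/main, not the class initializer
    }
  }
}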
@@ -82,13 +82,15 @@ public boolean getParseToEnd() {
return parseMultipleSources;
}

@SuppressWarnings("ErroneousBitwiseExpression")
@Override
@SuppressWarnings("ErroneousBitwiseExpression")
public Query parse() throws SyntaxError {
ValueSource vs = null;
List<ValueSource> lst = null;

for (; ; ) {
+ // @SuppressWarnings("ErroneousBitwiseExpression") is needed since
+ // FLAG_DEFAULT & ~FLAG_CONSUME_DELIMITER == 0
ValueSource valsource = parseValueSource(FLAG_DEFAULT & ~FLAG_CONSUME_DELIMITER);
sp.eatws();
if (!parseMultipleSources) {
@@ -337,6 +339,8 @@ public Query parseNestedQuery() throws SyntaxError {
*/
@SuppressWarnings("ErroneousBitwiseExpression")
protected ValueSource parseValueSource(boolean doConsumeDelimiter) throws SyntaxError {
+ // @SuppressWarnings("ErroneousBitwiseExpression") is needed since
+ // FLAG_DEFAULT & ~FLAG_CONSUME_DELIMITER == 0
return parseValueSource(
doConsumeDelimiter
? (FLAG_DEFAULT | FLAG_CONSUME_DELIMITER)
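The suppressions in this file stay but are now documented: errorprone flags FLAG_DEFAULT & ~FLAG_CONSUME_DELIMITER because it constant-folds to zero, which usually indicates a typo, while here zero is the intended value. A standalone sketch with hypothetical flag values (the real constants live in org.apache.solr.search.FunctionQParser and may differ):

public class BitwiseFlagsDemo {
  // Hypothetical values standing in for FunctionQParser's real constants.
  static final int FLAG_CONSUME_DELIMITER = 0x01;
  static final int FLAG_DEFAULT = FLAG_CONSUME_DELIMITER;

  public static void main(String[] args) {
    // Constant-folds to 0; errorprone assumes that is a mistake, but
    // "the default flags minus the delimiter flag" is exactly the intent.
    int flags = FLAG_DEFAULT & ~FLAG_CONSUME_DELIMITER;
    System.out.println(flags); // 0
  }
}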
13 changes: 7 additions & 6 deletions solr/core/src/java/org/apache/solr/search/PointMerger.java
@@ -237,7 +237,7 @@ public void visit(int docID, byte[] packedValue) throws IOException {
count[pos] = 1;
} else {
// a new value we don't have room for
- throw breakException;
+ throw breakException();
}
}
}
@@ -299,7 +299,7 @@ public void visit(int docID, byte[] packedValue) throws IOException {
count[pos] = 1;
} else {
// a new value we don't have room for
- throw breakException;
+ throw breakException();
}
}
}
@@ -361,7 +361,7 @@ public void visit(int docID, byte[] packedValue) throws IOException {
count[pos] = 1;
} else {
// a new value we don't have room for
- throw breakException;
+ throw breakException();
}
}
}
@@ -423,7 +423,7 @@ public void visit(int docID, byte[] packedValue) throws IOException {
count[pos] = 1;
} else {
// a new value we don't have room for
- throw breakException;
+ throw breakException();
}
}
}
@@ -453,6 +453,7 @@ public synchronized Throwable fillInStackTrace() {
}
}

@SuppressWarnings("StaticAssignmentOfThrowable")
static BreakException breakException = new BreakException();
static BreakException breakException() {
return new BreakException();
}
}
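BreakException overrides fillInStackTrace() to skip stack capture, which is what made a shared static instance tempting in the first place; with that override in place, allocating a fresh instance per throw (the new breakException() factory) stays cheap. A standalone sketch of the stackless control-flow pattern:

public class StacklessDemo {
  static class BreakException extends RuntimeException {
    @Override
    public synchronized Throwable fillInStackTrace() {
      return this; // skip stack capture, so construction is cheap
    }
  }

  // Fresh instance per throw: no shared mutable Throwable state.
  static BreakException breakException() {
    return new BreakException();
  }

  public static void main(String[] args) {
    try {
      throw breakException();
    } catch (BreakException e) {
      System.out.println("frames captured: " + e.getStackTrace().length); // 0
    }
  }
}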
@@ -327,9 +327,9 @@ abstract static class DoubleSortedSetDVAcc extends SortedSetDVAcc {
double[] result;
double initialValue;

@SuppressWarnings("LongDoubleConversion")
public DoubleSortedSetDVAcc(
- FacetContext fcontext, SchemaField sf, int numSlots, long initialValue) throws IOException {
+ FacetContext fcontext, SchemaField sf, int numSlots, double initialValue)
+     throws IOException {
super(fcontext, sf, numSlots);
result = new double[numSlots];
this.initialValue = initialValue;
@@ -169,17 +169,16 @@ protected void setActualDocFreq(int termNum, int docFreq) {
maxTermCounts[termNum] = docFreq;
}

@SuppressWarnings("NarrowCalculation")
public long memSize() {
// can cache the mem size since it shouldn't change
if (memsz != 0) return memsz;
long sz = super.ramBytesUsed();
sz += 8 * 8 + 32; // local fields
- sz += bigTerms.size() * 64;
+ sz += bigTerms.size() * 64L;
for (TopTerm tt : bigTerms.values()) {
sz += tt.memSize();
}
- if (maxTermCounts != null) sz += maxTermCounts.length * 4;
+ if (maxTermCounts != null) sz += maxTermCounts.length * 4L;
memsz = sz;
return sz;
}
@@ -608,13 +608,12 @@ public SolrParams parseParamsAndFillStreams(
static class MultipartRequestParser implements SolrRequestParser {
private final MultipartConfigElement multipartConfigElement;

@SuppressWarnings("NarrowCalculation")
public MultipartRequestParser(int uploadLimitKB) {
multipartConfigElement =
new MultipartConfigElement(
null, // temp dir (null=default)
-1, // maxFileSize (-1=none)
- uploadLimitKB * 1024, // maxRequestSize
+ uploadLimitKB * 1024L, // maxRequestSize
100 * 1024); // fileSizeThreshold after which will go to disk
}

@@ -168,19 +168,18 @@ public class DocTermOrds implements Accountable {

// TODO: Why is indexedTermsArray not part of this?
/** Returns total bytes used. */
@SuppressWarnings("NarrowCalculation")
@Override
public long ramBytesUsed() {
// can cache the mem size since it shouldn't change
if (memsz != 0) return memsz;
long sz = 8 * 8 + 32; // local fields
- if (index != null) sz += index.length * 4;
+ if (index != null) sz += index.length * 4L;
if (tnums != null) {
for (byte[] arr : tnums) if (arr != null) sz += arr.length;
}
if (indexedTermsArray != null) {
// assume 8 byte references?
- sz += 8 + 8 + 8 + 8 + (indexedTermsArray.length << 3) + sizeOfIndexedStrings;
+ sz += 8 + 8 + 8 + 8 + ((long) indexedTermsArray.length << 3) + sizeOfIndexedStrings;
}
memsz = sz;
return sz;
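The shift is the subtle case here: indexedTermsArray.length << 3 shifts in 32-bit arithmetic, so a sufficiently large array would wrap before the result joins the long sum; casting the operand to long first makes the shift 64-bit. A standalone sketch with an exaggerated length:

public class ShiftWideningDemo {
  public static void main(String[] args) {
    int length = 300_000_000; // stand-in for a very large array length
    // The 32-bit shift wraps past Integer.MAX_VALUE before widening.
    long wrapped = length << 3;         // -1894967296
    // Widening the operand first makes the shift itself 64-bit.
    long widened = (long) length << 3;  // 2400000000
    System.out.println(wrapped + " " + widened);
  }
}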
@@ -894,13 +894,12 @@ public BytesRef lookupOrd(int ord) {
}
}

@SuppressWarnings("NarrowCalculation")
@Override
public long ramBytesUsed() {
return bytes.ramBytesUsed()
+ termOrdToBytesOffset.ramBytesUsed()
+ docToTermOrd.ramBytesUsed()
- + 3 * RamUsageEstimator.NUM_BYTES_OBJECT_REF
+ + 3L * RamUsageEstimator.NUM_BYTES_OBJECT_REF
+ Integer.BYTES;
}

@@ -1108,12 +1107,11 @@ public BytesRef binaryValue() {
};
}

@SuppressWarnings("NarrowCalculation")
@Override
public long ramBytesUsed() {
return bytes.ramBytesUsed()
+ docToOffset.ramBytesUsed()
- + 2 * RamUsageEstimator.NUM_BYTES_OBJECT_REF;
+ + 2L * RamUsageEstimator.NUM_BYTES_OBJECT_REF;
}

@Override
@@ -140,14 +140,13 @@ static long estimate(Object obj) {
return primitiveEstimate(obj, 0L);
}

@SuppressWarnings("NarrowCalculation")
private static long primitiveEstimate(Object obj, long def) {
Class<?> clazz = obj.getClass();
if (clazz.isPrimitive()) {
return primitiveSizes.get(clazz);
}
if (obj instanceof String) {
- return ((String) obj).length() * Character.BYTES;
+ return (long) ((String) obj).length() * Character.BYTES;
}
return def;
}
@@ -153,12 +153,11 @@ public ConcurrentLRUCache(
setRunCleanupThread(runCleanupThread);
}

@SuppressWarnings("NarrowCalculation")
public ConcurrentLRUCache(int size, int lowerWatermark) {
this(
size,
lowerWatermark,
- (int) Math.floor((lowerWatermark + size) / 2),
+ (int) Math.floor((lowerWatermark + size) / 2.0),
(int) Math.ceil(0.75 * size),
false,
false,
@@ -646,7 +645,7 @@ private static class PQueue<K, V> extends PriorityQueue<CacheEntry<K, V>> {
myMaxSize = maxSz;
}

- @SuppressWarnings({"unchecked", "NarrowCalculation"})
+ @SuppressWarnings("unchecked")
Iterable<CacheEntry<K, V>> getValues() {
return (Iterable) Collections.unmodifiableCollection(Arrays.asList(heap));
}
@@ -839,7 +838,6 @@ public Stats getStats() {
}

public static class Stats implements Accountable {
@SuppressWarnings("NarrowCalculation")
private static final long RAM_BYTES_USED =
// accounts for field refs
RamUsageEstimator.shallowSizeOfInstance(Stats.class)
@@ -848,7 +846,7 @@ public static class Stats implements Accountable {
6
* (RamUsageEstimator.NUM_BYTES_ARRAY_HEADER
+ RamUsageEstimator.primitiveSizes.get(long.class)
- + 2
+ + 2L
* (RamUsageEstimator.NUM_BYTES_OBJECT_REF
+ RamUsageEstimator.primitiveSizes.get(long.class)))
+
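In the old constructor, (lowerWatermark + size) / 2 was already integer division, so Math.floor received a value with no fractional part and the suppression papered over a pointless call; dividing by 2.0 makes the floor meaningful while producing the same result for non-negative inputs. A standalone sketch with demo values:

public class FloorDemo {
  public static void main(String[] args) {
    int lower = 3, size = 4;
    // (lower + size) / 2 is integer division: already truncated to 3
    // before Math.floor ever runs.
    int before = (int) Math.floor((lower + size) / 2);
    // Dividing by 2.0 keeps the fraction (3.5), so Math.floor does real work.
    int after = (int) Math.floor((lower + size) / 2.0);
    System.out.println(before + " " + after); // 3 3
  }
}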
